cross merge fullstack_release_candidate into trunk

git-svn-id: https://hyracks.googlecode.com/svn/trunk/fullstack@3208 123451ca-8445-de46-9d55-352943316053
diff --git a/algebricks/algebricks-common/pom.xml b/algebricks/algebricks-common/pom.xml
index a5677a5..521ef12 100644
--- a/algebricks/algebricks-common/pom.xml
+++ b/algebricks/algebricks-common/pom.xml
@@ -16,8 +16,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
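
Note: this source/target bump from 1.6 to 1.7 (plus <fork>true</fork>, which runs javac in a separate JVM) repeats across the module POMs in this merge. A minimal, self-contained sketch of the Java 7 language features the new source level admits; the class is illustrative only, not part of the patch:

    import java.io.BufferedReader;
    import java.io.FileReader;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    public class Java7SyntaxSketch {
        public static void main(String[] args) throws IOException {
            List<String> lines = new ArrayList<>();              // diamond operator (1.7+)
            try (BufferedReader r = new BufferedReader(new FileReader(args[0]))) {
                String line;                                     // try-with-resources (1.7+)
                while ((line = r.readLine()) != null) {
                    lines.add(line);
                }
            }
            switch (args.length > 1 ? args[1] : "count") {       // strings in switch (1.7+)
                case "count":
                    System.out.println(lines.size());
                    break;
                default:
                    System.out.println(lines);
            }
        }
    }
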
diff --git a/algebricks/algebricks-compiler/pom.xml b/algebricks/algebricks-compiler/pom.xml
index 8dc083d..bd35835 100644
--- a/algebricks/algebricks-compiler/pom.xml
+++ b/algebricks/algebricks-compiler/pom.xml
@@ -16,8 +16,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/AbstractCompilerFactoryBuilder.java b/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/AbstractCompilerFactoryBuilder.java
index f1e7acb..dde4443 100644
--- a/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/AbstractCompilerFactoryBuilder.java
+++ b/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/AbstractCompilerFactoryBuilder.java
@@ -30,6 +30,7 @@
 import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspectorFactory;
 import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
 import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
 import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
 import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
 import edu.uci.ics.hyracks.algebricks.data.IPrinterFactoryProvider;
@@ -44,6 +45,7 @@
     protected ITypeTraitProvider typeTraitProvider;
     protected ISerializerDeserializerProvider serializerDeserializerProvider;
     protected IBinaryHashFunctionFactoryProvider hashFunctionFactoryProvider;
+    protected IBinaryHashFunctionFamilyProvider hashFunctionFamilyProvider;
     protected IBinaryComparatorFactoryProvider comparatorFactoryProvider;
     protected IBinaryBooleanInspectorFactory binaryBooleanInspectorFactory;
     protected IBinaryIntegerInspectorFactory binaryIntegerInspectorFactory;
@@ -94,6 +96,14 @@
         return hashFunctionFactoryProvider;
     }
 
+    public void setHashFunctionFamilyProvider(IBinaryHashFunctionFamilyProvider hashFunctionFamilyProvider) {
+        this.hashFunctionFamilyProvider = hashFunctionFamilyProvider;
+    }
+
+    public IBinaryHashFunctionFamilyProvider getHashFunctionFamilyProvider() {
+        return hashFunctionFamilyProvider;
+    }
+
     public void setComparatorFactoryProvider(IBinaryComparatorFactoryProvider comparatorFactoryProvider) {
         this.comparatorFactoryProvider = comparatorFactoryProvider;
     }
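
Note: the builder now carries an IBinaryHashFunctionFamilyProvider alongside the existing per-call factory provider; in Hyracks a hash function family generates seed-dependent hash functions, which operators that re-partition recursively (for example, hybrid-hash joins) need. A wiring sketch under the assumption that both providers come from the host system:

    import edu.uci.ics.hyracks.algebricks.compiler.api.HeuristicCompilerFactoryBuilder;
    import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
    import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;

    public class BuilderWiringSketch {
        // Illustrative helper, not part of this change.
        static void configure(HeuristicCompilerFactoryBuilder builder,
                IBinaryHashFunctionFactoryProvider factoryProvider,
                IBinaryHashFunctionFamilyProvider familyProvider) {
            builder.setHashFunctionFactoryProvider(factoryProvider);
            // New in this merge; threaded through to JobGenContext in the next file.
            builder.setHashFunctionFamilyProvider(familyProvider);
        }
    }
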
diff --git a/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/HeuristicCompilerFactoryBuilder.java b/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/HeuristicCompilerFactoryBuilder.java
index 1d21463..edc1b66 100644
--- a/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/HeuristicCompilerFactoryBuilder.java
+++ b/algebricks/algebricks-compiler/src/main/java/edu/uci/ics/hyracks/algebricks/compiler/api/HeuristicCompilerFactoryBuilder.java
@@ -84,11 +84,12 @@
                     public JobSpecification createJob(Object appContext) throws AlgebricksException {
                         AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Starting Job Generation.\n");
                         JobGenContext context = new JobGenContext(null, metadata, appContext,
-                                serializerDeserializerProvider, hashFunctionFactoryProvider, comparatorFactoryProvider,
-                                typeTraitProvider, binaryBooleanInspectorFactory, binaryIntegerInspectorFactory,
-                                printerProvider, nullWriterFactory, normalizedKeyComputerFactoryProvider,
-                                expressionRuntimeProvider, expressionTypeComputer, nullableTypeComputer, oc,
-                                expressionEvalSizeComputer, partialAggregationTypeComputer, frameSize, clusterLocations);
+                                serializerDeserializerProvider, hashFunctionFactoryProvider,
+                                hashFunctionFamilyProvider, comparatorFactoryProvider, typeTraitProvider,
+                                binaryBooleanInspectorFactory, binaryIntegerInspectorFactory, printerProvider,
+                                nullWriterFactory, normalizedKeyComputerFactoryProvider, expressionRuntimeProvider,
+                                expressionTypeComputer, nullableTypeComputer, oc, expressionEvalSizeComputer,
+                                partialAggregationTypeComputer, frameSize, clusterLocations);
                         PlanCompiler pc = new PlanCompiler(context);
                         return pc.compilePlan(plan, null);
                     }
diff --git a/algebricks/algebricks-core/pom.xml b/algebricks/algebricks-core/pom.xml
index a74a540..def5b35 100644
--- a/algebricks/algebricks-core/pom.xml
+++ b/algebricks/algebricks-core/pom.xml
@@ -16,8 +16,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalOperator.java
index 165fccd..6701385 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/ILogicalOperator.java
@@ -89,4 +89,9 @@
     public IPhysicalPropertiesVector getDeliveredPhysicalProperties();
 
     public void computeDeliveredPhysicalProperties(IOptimizationContext context) throws AlgebricksException;
+    
+    /**
+     * Indicates whether the expressions used by this operator must be variable reference expressions.
+     */
+    public boolean requiresVariableReferenceExpressions();
 }
\ No newline at end of file
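
Note: the new requiresVariableReferenceExpressions() contract lets normalization rules decide, per operator, whether complex expression arguments must be pulled out into ASSIGNs; AbstractLogicalOperator below defaults it to true, and ASSIGN, SELECT, and DIE override it to return false in this merge. A sketch of how a rule might consult the flag; ExtractExpressionSketch and its splicing caller are assumptions, not code from this merge:

    import org.apache.commons.lang3.mutable.Mutable;
    import org.apache.commons.lang3.mutable.MutableObject;

    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;

    public class ExtractExpressionSketch {
        // Rewrites exprRef to a fresh variable backed by an ASSIGN, but only for
        // operators that insist on variable-reference arguments.
        static AssignOperator extractIfRequired(ILogicalOperator op,
                Mutable<ILogicalExpression> exprRef, IOptimizationContext context) {
            ILogicalExpression expr = exprRef.getValue();
            if (!op.requiresVariableReferenceExpressions()
                    || expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
                return null; // nothing to extract
            }
            LogicalVariable v = context.newVar();
            AssignOperator assign = new AssignOperator(v, new MutableObject<ILogicalExpression>(expr));
            exprRef.setValue(new VariableReferenceExpression(v));
            return assign; // caller splices this between op and its input
        }
    }
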
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IOptimizationContext.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IOptimizationContext.java
index 468d25c..0aa1ff6 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IOptimizationContext.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/IOptimizationContext.java
@@ -51,6 +51,8 @@
      * returns true if op1 and op2 have already been compared
      */
     public abstract boolean checkAndAddToAlreadyCompared(ILogicalOperator op1, ILogicalOperator op2);
+    
+    public abstract void removeFromAlreadyCompared(ILogicalOperator op1);
 
     public abstract void addNotToBeInlinedVar(LogicalVariable var);
 
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java
index 5234d2c..b8bdf3e 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java
@@ -20,6 +20,7 @@
     CLUSTER,
     DATASOURCESCAN,
     DISTINCT,
+    DISTRIBUTE_RESULT,
     GROUP,
     EMPTYTUPLESOURCE,
     EXCHANGE,
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java
index a969372..32cfb9a 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java
@@ -7,6 +7,7 @@
     BTREE_SEARCH,
     STATS,
     DATASOURCE_SCAN,
+    DISTRIBUTE_RESULT,
     EMPTY_TUPLE_SOURCE,
     EXTERNAL_GROUP_BY,
     IN_MEMORY_HASH_JOIN,
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ScalarFunctionCallExpression.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ScalarFunctionCallExpression.java
index bf3f82b..b284b22 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ScalarFunctionCallExpression.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/ScalarFunctionCallExpression.java
@@ -39,9 +39,11 @@
 
     @Override
     public ScalarFunctionCallExpression cloneExpression() {
-        cloneAnnotations();
         List<Mutable<ILogicalExpression>> clonedArgs = cloneArguments();
-        return new ScalarFunctionCallExpression(finfo, clonedArgs);
+        ScalarFunctionCallExpression funcExpr = new ScalarFunctionCallExpression(finfo, clonedArgs);
+        funcExpr.getAnnotations().putAll(cloneAnnotations());
+        funcExpr.setOpaqueParameters(this.getOpaqueParameters());
+        return funcExpr;
     }
 
     @Override
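
Note: the previous cloneExpression() called cloneAnnotations() but discarded its result, so clones silently lost their annotations, and opaque parameters were never copied at all. A small check of the behavior the fix restores, assuming an `original` expression populated by the host system:

    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;

    class CloneCheckSketch {
        static boolean cloneKeepsMetadata(ScalarFunctionCallExpression original) {
            ScalarFunctionCallExpression copy = original.cloneExpression();
            // Annotations share keys and the opaque parameter arrays match element-wise.
            return copy.getAnnotations().keySet().equals(original.getAnnotations().keySet())
                    && java.util.Arrays.equals(copy.getOpaqueParameters(), original.getOpaqueParameters());
        }
    }
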
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/UnnestingFunctionCallExpression.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/UnnestingFunctionCallExpression.java
index 71932d8..652f9b0 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/UnnestingFunctionCallExpression.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/expressions/UnnestingFunctionCallExpression.java
@@ -45,6 +45,7 @@
         List<Mutable<ILogicalExpression>> clonedArgs = cloneArguments();
         UnnestingFunctionCallExpression ufce = new UnnestingFunctionCallExpression(finfo, clonedArgs);
         ufce.setReturnsUniqueValues(returnsUniqueValues);
+        ufce.setOpaqueParameters(this.getOpaqueParameters());
         return ufce;
     }
 
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IMetadataProvider.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IMetadataProvider.java
index 899b633..82187e3 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IMetadataProvider.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/metadata/IMetadataProvider.java
@@ -51,6 +51,10 @@
             int[] printColumns, IPrinterFactory[] printerFactories, RecordDescriptor inputDesc)
             throws AlgebricksException;
 
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getResultHandleRuntime(IDataSink sink,
+            int[] printColumns, IPrinterFactory[] printerFactories, RecordDescriptor inputDesc, boolean ordered,
+            JobSpecification spec) throws AlgebricksException;
+
     public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getWriteResultRuntime(IDataSource<S> dataSource,
             IOperatorSchema propagatedSchema, List<LogicalVariable> keys, LogicalVariable payLoadVar,
             JobGenContext context, JobSpecification jobSpec) throws AlgebricksException;
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractLogicalOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractLogicalOperator.java
index dc0edfe..64dbdef 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractLogicalOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AbstractLogicalOperator.java
@@ -182,4 +182,9 @@
         return new PropagatingTypeEnvironment(ctx.getExpressionTypeComputer(), ctx.getNullableTypeComputer(),
                 ctx.getMetadataProvider(), TypePropagationPolicy.ALL, envPointers);
     }
+    
+    @Override
+    public boolean requiresVariableReferenceExpressions() {
+        return true;
+    }
 }
\ No newline at end of file
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java
index a4dc2e0..4543997 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java
@@ -89,4 +89,8 @@
         return env;
     }
 
+    @Override
+    public boolean requiresVariableReferenceExpressions() {
+        return false;
+    }
 }
\ No newline at end of file
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DataSourceScanOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DataSourceScanOperator.java
index 3227f3d..3c21c8f 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DataSourceScanOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DataSourceScanOperator.java
@@ -106,5 +106,4 @@
         }
         return env;
     }
-
 }
\ No newline at end of file
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DieOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DieOperator.java
index 20aa574..03bfcba 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DieOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DieOperator.java
@@ -81,4 +81,8 @@
         return createPropagatingAllInputsTypeEnvironment(ctx);
     }
 
+    @Override
+    public boolean requiresVariableReferenceExpressions() {
+        return false;
+    }
 }
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DistinctOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DistinctOperator.java
index ee0dcf6..b1da831 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DistinctOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DistinctOperator.java
@@ -28,6 +28,7 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
 import edu.uci.ics.hyracks.algebricks.core.algebra.properties.VariablePropagationPolicy;
 import edu.uci.ics.hyracks.algebricks.core.algebra.typing.ITypingContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.typing.NonPropagatingTypeEnvironment;
 import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
 import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
 
@@ -49,12 +50,34 @@
 
     @Override
     public void recomputeSchema() {
-        schema = new ArrayList<LogicalVariable>(inputs.get(0).getValue().getSchema());
+        if (schema == null) {
+            schema = new ArrayList<LogicalVariable>();
+        }
+        schema.clear();
+        for (Mutable<ILogicalExpression> eRef : expressions) {
+            ILogicalExpression e = eRef.getValue();
+            if (e.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                VariableReferenceExpression v = (VariableReferenceExpression) e;
+                schema.add(v.getVariableReference());
+            }
+        }
     }
 
     @Override
     public VariablePropagationPolicy getVariablePropagationPolicy() {
-        return VariablePropagationPolicy.ALL;
+        return new VariablePropagationPolicy() {
+            @Override
+            public void propagateVariables(IOperatorSchema target, IOperatorSchema... sources)
+                    throws AlgebricksException {
+                for (Mutable<ILogicalExpression> eRef : expressions) {
+                    ILogicalExpression e = eRef.getValue();
+                    if (e.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                        VariableReferenceExpression v = (VariableReferenceExpression) e;
+                        target.addVariable(v.getVariableReference());
+                    }
+                }
+            }
+        };
     }
 
     @Override
@@ -105,7 +128,16 @@
 
     @Override
     public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
-        return createPropagatingAllInputsTypeEnvironment(ctx);
+        IVariableTypeEnvironment env = new NonPropagatingTypeEnvironment(ctx.getExpressionTypeComputer(), ctx.getMetadataProvider());
+        IVariableTypeEnvironment childEnv = ctx.getOutputTypeEnvironment(inputs.get(0).getValue());
+        for (Mutable<ILogicalExpression> exprRef : expressions) {
+            ILogicalExpression expr = exprRef.getValue();
+            if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                VariableReferenceExpression varRefExpr = (VariableReferenceExpression) expr;
+                env.setVarType(varRefExpr.getVariableReference(), childEnv.getType(expr));
+            }
+        }
+        return env;
     }
 
 }
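
Note: taken together, the DistinctOperator changes narrow DISTINCT from "propagate everything the input carries" to "propagate exactly the variables named in my expressions" for schema, variable propagation, and output typing alike. A sketch of the new recomputeSchema() contract; the variable id and wiring are illustrative:

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.commons.lang3.mutable.Mutable;
    import org.apache.commons.lang3.mutable.MutableObject;

    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;

    public class DistinctSchemaSketch {
        public static void main(String[] args) {
            LogicalVariable v1 = new LogicalVariable(1);
            List<Mutable<ILogicalExpression>> exprs = new ArrayList<Mutable<ILogicalExpression>>();
            exprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(v1)));
            DistinctOperator distinct = new DistinctOperator(exprs);
            distinct.recomputeSchema();
            // Schema is now [v1] only; input variables not listed among the
            // DISTINCT expressions no longer propagate.
            System.out.println(distinct.getSchema());
        }
    }
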
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DistributeResultOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DistributeResultOperator.java
new file mode 100644
index 0000000..6ca6d87
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DistributeResultOperator.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSink;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.VariablePropagationPolicy;
+import edu.uci.ics.hyracks.algebricks.core.algebra.typing.ITypingContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
+
+public class DistributeResultOperator extends AbstractLogicalOperator {
+    private List<Mutable<ILogicalExpression>> expressions;
+    private IDataSink dataSink;
+
+    public DistributeResultOperator(List<Mutable<ILogicalExpression>> expressions, IDataSink dataSink) {
+        this.expressions = expressions;
+        this.dataSink = dataSink;
+    }
+
+    public List<Mutable<ILogicalExpression>> getExpressions() {
+        return expressions;
+    }
+
+    public IDataSink getDataSink() {
+        return dataSink;
+    }
+
+    @Override
+    public LogicalOperatorTag getOperatorTag() {
+        return LogicalOperatorTag.DISTRIBUTE_RESULT;
+    }
+
+    @Override
+    public <R, T> R accept(ILogicalOperatorVisitor<R, T> visitor, T arg) throws AlgebricksException {
+        return visitor.visitDistributeResultOperator(this, arg);
+    }
+
+    @Override
+    public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform visitor) throws AlgebricksException {
+        boolean modif = false;
+        for (int i = 0; i < expressions.size(); i++) {
+            boolean b = visitor.transform(expressions.get(i));
+            if (b) {
+                modif = true;
+            }
+        }
+        return modif;
+    }
+
+    @Override
+    public VariablePropagationPolicy getVariablePropagationPolicy() {
+        return VariablePropagationPolicy.ALL;
+    }
+
+    @Override
+    public boolean isMap() {
+        return false; // actually depends on the physical op.
+    }
+
+    @Override
+    public void recomputeSchema() {
+        schema = new ArrayList<LogicalVariable>();
+        schema.addAll(inputs.get(0).getValue().getSchema());
+    }
+
+    @Override
+    public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
+        return createPropagatingAllInputsTypeEnvironment(ctx);
+    }
+
+}
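
Note: DISTRIBUTE_RESULT is a new root operator pairing the output expressions with the host system's IDataSink, an alternative to WRITE/WRITE_RESULT for handing results back to clients. A construction sketch, assuming planRoot/exprs/sink exist and that DistributeResultPOperator (added later in this patch) has a no-arg constructor:

    import java.util.List;

    import org.apache.commons.lang3.mutable.Mutable;
    import org.apache.commons.lang3.mutable.MutableObject;

    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSink;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.DistributeResultPOperator;

    public class DistributeResultSketch {
        static DistributeResultOperator attach(ILogicalOperator planRoot,
                List<Mutable<ILogicalExpression>> exprs, IDataSink sink) {
            DistributeResultOperator resultOp = new DistributeResultOperator(exprs, sink);
            resultOp.getInputs().add(new MutableObject<ILogicalOperator>(planRoot));
            resultOp.setPhysicalOperator(new DistributeResultPOperator());
            return resultOp;
        }
    }
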
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ExtensionOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ExtensionOperator.java
index 101b6f5..5aa858f 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ExtensionOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/ExtensionOperator.java
@@ -51,7 +51,7 @@
 
     @Override
     public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform) throws AlgebricksException {
-        return false;
+        return delegate.acceptExpressionTransform(transform);
     }
 
     @Override
@@ -108,4 +108,13 @@
         return this.createPropagatingAllInputsTypeEnvironment(ctx);
     }
 
+    @Override
+    public String toString() {
+        return delegate.toString();
+    }
+    
+    public IOperatorExtension getDelegate() {
+        return delegate;
+    }
+
 }
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IOperatorExtension.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IOperatorExtension.java
index 98c3301..0a80337 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IOperatorExtension.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/IOperatorExtension.java
@@ -14,6 +14,7 @@
  */
 package edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical;
 
+import java.util.Collection;
 import java.util.List;
 
 import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -42,5 +43,8 @@
     void setPhysicalOperator(IPhysicalOperator physicalOperator);
 
     ExecutionMode getExecutionMode();
-
+    
+    public void getUsedVariables(Collection<LogicalVariable> usedVars);
+    
+    public void getProducedVariables(Collection<LogicalVariable> producedVars);
 }
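
Note: the two new methods let the generic variable analyses see through extension operators; the visitor changes below call straight into them via ExtensionOperator.getDelegate(). A sketch of a delegate honoring the contract; the abstract class is illustrative and leaves the rest of the interface unimplemented:

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.List;

    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorExtension;

    public abstract class SingleOutputExtensionSketch implements IOperatorExtension {
        protected LogicalVariable outVar;                 // the variable this extension writes
        protected final List<LogicalVariable> inVars = new ArrayList<LogicalVariable>();

        @Override
        public void getProducedVariables(Collection<LogicalVariable> producedVars) {
            producedVars.add(outVar);
        }

        @Override
        public void getUsedVariables(Collection<LogicalVariable> usedVars) {
            usedVars.addAll(inVars);
        }
    }
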
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/LimitOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/LimitOperator.java
index 3c6a699..8e67ed9 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/LimitOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/LimitOperator.java
@@ -109,5 +109,4 @@
     public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
         return createPropagatingAllInputsTypeEnvironment(ctx);
     }
-
 }
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SelectOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SelectOperator.java
index 8c611b0..fda920a 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SelectOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/SelectOperator.java
@@ -102,4 +102,8 @@
         return env;
     }
 
+    @Override
+    public boolean requiresVariableReferenceExpressions() {
+        return false;
+    }
 }
\ No newline at end of file
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
index 0539cbe..1b4be1e 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
@@ -47,8 +47,10 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
@@ -64,7 +66,6 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
@@ -476,6 +477,14 @@
     }
 
     @Override
+    public Void visitDistributeResultOperator(DistributeResultOperator op, IOptimizationContext ctx)
+            throws AlgebricksException {
+        // propagateFDsAndEquivClasses(op, ctx);
+        setEmptyFDsEqClasses(op, ctx);
+        return null;
+    }
+
+    @Override
     public Void visitWriteResultOperator(WriteResultOperator op, IOptimizationContext ctx) throws AlgebricksException {
         // propagateFDsAndEquivClasses(op, ctx);
         setEmptyFDsEqClasses(op, ctx);
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
index 31061db..ac6d887 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
@@ -38,6 +38,7 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
@@ -345,7 +346,7 @@
         AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
         if (aop.getOperatorTag() != LogicalOperatorTag.UNNEST_MAP)
             return Boolean.FALSE;
-        UnnestOperator unnestOpArg = (UnnestOperator) copyAndSubstituteVar(op, arg);
+        UnnestMapOperator unnestOpArg = (UnnestMapOperator) copyAndSubstituteVar(op, arg);
         boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getVariables(), unnestOpArg.getVariables());
         if (!isomorphic)
             return Boolean.FALSE;
@@ -425,6 +426,17 @@
     }
 
     @Override
+    public Boolean visitDistributeResultOperator(DistributeResultOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        if (aop.getOperatorTag() != LogicalOperatorTag.DISTRIBUTE_RESULT)
+            return Boolean.FALSE;
+        DistributeResultOperator writeOpArg = (DistributeResultOperator) copyAndSubstituteVar(op, arg);
+        boolean isomorphic = VariableUtilities.varListEqualUnordered(op.getSchema(), writeOpArg.getSchema());
+        return isomorphic;
+    }
+
+    @Override
     public Boolean visitWriteResultOperator(WriteResultOperator op, ILogicalOperator arg) throws AlgebricksException {
         AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
         if (aop.getOperatorTag() != LogicalOperatorTag.WRITE_RESULT)
@@ -762,6 +774,14 @@
         }
 
         @Override
+        public ILogicalOperator visitDistributeResultOperator(DistributeResultOperator op, Void arg)
+                throws AlgebricksException {
+            ArrayList<Mutable<ILogicalExpression>> newExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+            deepCopyExpressionRefs(newExpressions, op.getExpressions());
+            return new DistributeResultOperator(newExpressions, op.getDataSink());
+        }
+
+        @Override
         public ILogicalOperator visitWriteResultOperator(WriteResultOperator op, Void arg) throws AlgebricksException {
             ArrayList<Mutable<ILogicalExpression>> newKeyExpressions = new ArrayList<Mutable<ILogicalExpression>>();
             deepCopyExpressionRefs(newKeyExpressions, op.getKeyExpressions());
@@ -784,8 +804,8 @@
             deepCopyExpressionRefs(newPrimaryKeyExpressions, op.getPrimaryKeyExpressions());
             List<Mutable<ILogicalExpression>> newSecondaryKeyExpressions = new ArrayList<Mutable<ILogicalExpression>>();
             deepCopyExpressionRefs(newSecondaryKeyExpressions, op.getSecondaryKeyExpressions());
-            Mutable<ILogicalExpression> newFilterExpression = new MutableObject<ILogicalExpression>(((AbstractLogicalExpression)op.getFilterExpression())
-                    .cloneExpression());
+            Mutable<ILogicalExpression> newFilterExpression = new MutableObject<ILogicalExpression>(
+                    ((AbstractLogicalExpression) op.getFilterExpression()).cloneExpression());
             return new IndexInsertDeleteOperator(op.getDataSourceIndex(), newPrimaryKeyExpressions,
                     newSecondaryKeyExpressions, newFilterExpression, op.getOperation());
         }
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
index 562bb4c..b9544c7 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
@@ -37,8 +37,10 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
@@ -54,7 +56,6 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
@@ -230,6 +231,13 @@
     }
 
     @Override
+    public Void visitDistributeResultOperator(DistributeResultOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        mapVariablesStandard(op, arg);
+        return null;
+    }
+
+    @Override
     public Void visitWriteResultOperator(WriteResultOperator op, ILogicalOperator arg) throws AlgebricksException {
         mapVariablesStandard(op, arg);
         return null;
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java
index 9b2f5a0..8f1d686 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java
@@ -29,8 +29,10 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
@@ -46,7 +48,6 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
@@ -240,6 +241,12 @@
     }
 
     @Override
+    public Void visitDistributeResultOperator(DistributeResultOperator op, IOptimizationContext arg)
+            throws AlgebricksException {
+        return null;
+    }
+
+    @Override
     public Void visitWriteResultOperator(WriteResultOperator op, IOptimizationContext arg) throws AlgebricksException {
         // TODO Auto-generated method stub
         return null;
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java
index 994c6cb..31adcba 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java
@@ -32,8 +32,10 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
@@ -49,7 +51,6 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
@@ -223,6 +224,11 @@
     }
 
     @Override
+    public Void visitDistributeResultOperator(DistributeResultOperator op, Void arg) throws AlgebricksException {
+        return null;
+    }
+
+    @Override
     public Void visitWriteResultOperator(WriteResultOperator op, Void arg) throws AlgebricksException {
         return null;
     }
@@ -249,6 +255,7 @@
 
     @Override
     public Void visitExtensionOperator(ExtensionOperator op, Void arg) throws AlgebricksException {
+        op.getDelegate().getProducedVariables(producedVariables);
         return null;
     }
 }
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java
index 9295179..cd0cee3 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java
@@ -31,8 +31,10 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
@@ -48,7 +50,6 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
@@ -85,7 +86,13 @@
 
     @Override
     public Void visitDistinctOperator(DistinctOperator op, Void arg) throws AlgebricksException {
-        standardLayout(op);
+        for (Mutable<ILogicalExpression> exprRef : op.getExpressions()) {
+            ILogicalExpression expr = exprRef.getValue();
+            if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                VariableReferenceExpression varRefExpr = (VariableReferenceExpression) expr;
+                schemaVariables.add(varRefExpr.getVariableReference());
+            }
+        }
         return null;
     }
 
@@ -232,6 +239,12 @@
     }
 
     @Override
+    public Void visitDistributeResultOperator(DistributeResultOperator op, Void arg) throws AlgebricksException {
+        standardLayout(op);
+        return null;
+    }
+
+    @Override
     public Void visitWriteResultOperator(WriteResultOperator op, Void arg) throws AlgebricksException {
         standardLayout(op);
         return null;
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java
index 11e56ca..69fb3f8 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java
@@ -33,8 +33,10 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
@@ -51,7 +53,6 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
@@ -335,6 +336,16 @@
     }
 
     @Override
+    public Void visitDistributeResultOperator(DistributeResultOperator op, Pair<LogicalVariable, LogicalVariable> pair)
+            throws AlgebricksException {
+        for (Mutable<ILogicalExpression> e : op.getExpressions()) {
+            e.getValue().substituteVar(pair.first, pair.second);
+        }
+        substVarTypes(op, pair);
+        return null;
+    }
+
+    @Override
     public Void visitWriteResultOperator(WriteResultOperator op, Pair<LogicalVariable, LogicalVariable> pair)
             throws AlgebricksException {
         op.getPayloadExpression().getValue().substituteVar(pair.first, pair.second);
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java
index 3a82ccd..5361a19 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java
@@ -25,14 +25,17 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IPhysicalOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
@@ -49,13 +52,17 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.HashPartitionExchangePOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.HashPartitionMergeExchangePOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.RangePartitionPOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.SortMergeExchangePOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.OrderColumn;
 import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
 
 public class UsedVariableVisitor implements ILogicalOperatorVisitor<Void, Void> {
@@ -106,8 +113,49 @@
     }
 
     @Override
-    public Void visitExchangeOperator(ExchangeOperator op, Void arg) {
-        // does not use any variable
+    public Void visitExchangeOperator(ExchangeOperator op, Void arg) throws AlgebricksException {
+        // Used variables depend on the physical operator.
+        if (op.getPhysicalOperator() != null) {
+            IPhysicalOperator physOp = op.getPhysicalOperator();
+            switch (physOp.getOperatorTag()) {
+                case BROADCAST_EXCHANGE:
+                case ONE_TO_ONE_EXCHANGE:
+                case RANDOM_MERGE_EXCHANGE: {
+                    // No variables used.
+                    break;
+                }
+                case HASH_PARTITION_EXCHANGE: {
+                    HashPartitionExchangePOperator concreteOp = (HashPartitionExchangePOperator) physOp;
+                    usedVariables.addAll(concreteOp.getHashFields());
+                    break;
+                }
+                case HASH_PARTITION_MERGE_EXCHANGE: {
+                    HashPartitionMergeExchangePOperator concreteOp = (HashPartitionMergeExchangePOperator) physOp;
+                    usedVariables.addAll(concreteOp.getPartitionFields());
+                    for (OrderColumn orderCol : concreteOp.getOrderColumns()) {
+                        usedVariables.add(orderCol.getColumn());
+                    }
+                    break;
+                }
+                case SORT_MERGE_EXCHANGE: {
+                    SortMergeExchangePOperator concreteOp = (SortMergeExchangePOperator) physOp;
+                    for (OrderColumn orderCol : concreteOp.getSortColumns()) {
+                        usedVariables.add(orderCol.getColumn());
+                    }
+                    break;
+                }
+                case RANGE_PARTITION_EXCHANGE: {
+                    RangePartitionPOperator concreteOp = (RangePartitionPOperator) physOp;
+                    for (OrderColumn partCol : concreteOp.getPartitioningFields()) {
+                        usedVariables.add(partCol.getColumn());
+                    }
+                    break;
+                }
+                default: {
+                    throw new AlgebricksException("Unhandled physical operator tag '" + physOp.getOperatorTag() + "'.");
+                }
+            }
+        }
         return null;
     }
 
@@ -257,6 +305,14 @@
     }
 
     @Override
+    public Void visitDistributeResultOperator(DistributeResultOperator op, Void arg) {
+        for (Mutable<ILogicalExpression> expr : op.getExpressions()) {
+            expr.getValue().getUsedVariables(usedVariables);
+        }
+        return null;
+    }
+
+    @Override
     public Void visitWriteResultOperator(WriteResultOperator op, Void arg) {
         op.getPayloadExpression().getValue().getUsedVariables(usedVariables);
         for (Mutable<ILogicalExpression> e : op.getKeyExpressions()) {
@@ -297,6 +353,7 @@
 
     @Override
     public Void visitExtensionOperator(ExtensionOperator op, Void arg) throws AlgebricksException {
+        op.getDelegate().getUsedVariables(usedVariables);
         return null;
     }
 
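
Note: the rewritten visitExchangeOperator above is the substantive change in this file: an EXCHANGE now reports the variables its physical operator actually consumes (hash fields, merge/sort columns, range-partition columns) instead of claiming to use none. A sketch of the observable effect, assuming HashPartitionExchangePOperator's (hashFields, domain) constructor:

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.List;

    import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.HashPartitionExchangePOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.properties.INodeDomain;

    public class ExchangeUsedVarsSketch {
        static Collection<LogicalVariable> usedBy(List<LogicalVariable> hashFields, INodeDomain domain)
                throws AlgebricksException {
            ExchangeOperator exchange = new ExchangeOperator();
            exchange.setPhysicalOperator(new HashPartitionExchangePOperator(hashFields, domain));
            Collection<LogicalVariable> used = new ArrayList<LogicalVariable>();
            VariableUtilities.getUsedVariables(exchange, used);
            return used; // now contains hashFields; previously always empty
        }
    }
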
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java
index 25f22e7..f66f99b 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java
@@ -61,14 +61,22 @@
             ITypingContext ctx) throws AlgebricksException {

         substituteVariables(op, v1, v2, true, ctx);

     }

-

+    

+    public static void substituteVariablesInDescendantsAndSelf(ILogicalOperator op, LogicalVariable v1,

+            LogicalVariable v2, ITypingContext ctx) throws AlgebricksException {

+        for (Mutable<ILogicalOperator> childOp : op.getInputs()) {

+            substituteVariablesInDescendantsAndSelf(childOp.getValue(), v1, v2, ctx);

+        }

+        substituteVariables(op, v1, v2, true, ctx);

+    }

+    

     public static void substituteVariables(ILogicalOperator op, LogicalVariable v1, LogicalVariable v2,

             boolean goThroughNts, ITypingContext ctx) throws AlgebricksException {

         ILogicalOperatorVisitor<Void, Pair<LogicalVariable, LogicalVariable>> visitor = new SubstituteVariableVisitor(

                 goThroughNts, ctx);

         op.accept(visitor, new Pair<LogicalVariable, LogicalVariable>(v1, v2));

     }

-

+    

     public static <T> boolean varListEqualUnordered(List<T> var, List<T> varArg) {

         Set<T> varSet = new HashSet<T>();

         Set<T> varArgSet = new HashSet<T>();

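The new substituteVariablesInDescendantsAndSelf helper recurses into op.getInputs() before substituting at the operator itself, so one call renames a variable through an entire subtree. A minimal usage sketch, with hypothetical names (planRoot, oldVar, newVar, typingCtx) standing in for values from a surrounding rewrite rule:

    // Replace every occurrence of oldVar with newVar in the subtree rooted at planRoot.
    VariableUtilities.substituteVariablesInDescendantsAndSelf(planRoot, oldVar, newVar, typingCtx);
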
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/DistributeResultPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/DistributeResultPOperator.java
new file mode 100644
index 0000000..302d4d2
--- /dev/null
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/DistributeResultPOperator.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSink;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class DistributeResultPOperator extends AbstractPhysicalOperator {
+
+    @Override
+    public PhysicalOperatorTag getOperatorTag() {
+        return PhysicalOperatorTag.DISTRIBUTE_RESULT;
+    }
+
+    @Override
+    public boolean isMicroOperator() {
+        return false;
+    }
+
+    @Override
+    public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
+        ILogicalOperator op2 = op.getInputs().get(0).getValue();
+        deliveredProperties = op2.getDeliveredPhysicalProperties().clone();
+    }
+
+    @Override
+    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
+            IPhysicalPropertiesVector reqdByParent) {
+        DistributeResultOperator write = (DistributeResultOperator) op;
+        IDataSink sink = write.getDataSink();
+        IPartitioningProperty pp = sink.getPartitioningProperty();
+        StructuralPropertiesVector[] r = new StructuralPropertiesVector[] { new StructuralPropertiesVector(pp, null) };
+        return new PhysicalRequirements(r, IPartitioningRequirementsCoordinator.NO_COORDINATION);
+    }
+
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
+            IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
+            throws AlgebricksException {
+        DistributeResultOperator resultOp = (DistributeResultOperator) op;
+        IMetadataProvider mp = context.getMetadataProvider();
+
+        JobSpecification spec = builder.getJobSpec();
+
+        int[] columns = new int[resultOp.getExpressions().size()];
+        int i = 0;
+        for (Mutable<ILogicalExpression> exprRef : resultOp.getExpressions()) {
+            ILogicalExpression expr = exprRef.getValue();
+            if (expr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+                throw new NotImplementedException("Only writing variable expressions is supported.");
+            }
+            VariableReferenceExpression varRef = (VariableReferenceExpression) expr;
+            LogicalVariable v = varRef.getVariableReference();
+            columns[i++] = inputSchemas[0].findVariable(v);
+        }
+        RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(
+                context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
+
+        IPrinterFactory[] pf = JobGenHelper.mkPrinterFactories(inputSchemas[0], context.getTypeEnvironment(op),
+                context, columns);
+
+        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints = mp.getResultHandleRuntime(
+                resultOp.getDataSink(), columns, pf, inputDesc, false, spec);
+
+        builder.contributeHyracksOperator(resultOp, runtimeAndConstraints.first);
+        builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraints.first, runtimeAndConstraints.second);
+        ILogicalOperator src = resultOp.getInputs().get(0).getValue();
+        builder.contributeGraphEdge(src, 0, resultOp, 0);
+    }
+}
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
index f3c9e5a..61d4880 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
@@ -158,5 +158,13 @@
                 comparatorFactories);
         return new Pair<IConnectorDescriptor, TargetConstraint>(conn, null);
     }
+    
+    public List<LogicalVariable> getPartitionFields() {
+        return partitionFields;
+    }
+    
+    public List<OrderColumn> getOrderColumns() {
+        return orderColumns;
+    }
 
 }
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
index c737cc4..6da42b4 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
@@ -32,14 +32,21 @@
 import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
 import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
 import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
 import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
 import edu.uci.ics.hyracks.dataflow.std.join.HybridHashJoinOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor;
 
 public class HybridHashJoinPOperator extends AbstractHashJoinPOperator {
 
@@ -90,6 +97,8 @@
         IVariableTypeEnvironment env = context.getTypeEnvironment(op);
         IBinaryHashFunctionFactory[] hashFunFactories = JobGenHelper.variablesToBinaryHashFunctionFactories(
                 keysLeftBranch, env, context);
+        IBinaryHashFunctionFamily[] hashFunFamilies = JobGenHelper.variablesToBinaryHashFunctionFamilies(
+                keysLeftBranch, env, context);
         IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[keysLeft.length];
         int i = 0;
         IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
@@ -97,33 +106,75 @@
             Object t = env.getVarType(v);
             comparatorFactories[i++] = bcfp.getBinaryComparatorFactory(t, true);
         }
-        RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
+        RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
+                propagatedSchema, context);
         IOperatorDescriptorRegistry spec = builder.getJobSpec();
         IOperatorDescriptor opDesc = null;
-        try {
-            switch (kind) {
-                case INNER: {
-                    opDesc = new HybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(),
-                            maxInputBuildSizeInFrames, aveRecordsPerFrame, getFudgeFactor(), keysLeft, keysRight,
-                            hashFunFactories, comparatorFactories, recDescriptor);
-                    break;
-                }
-                case LEFT_OUTER: {
-                    INullWriterFactory[] nullWriterFactories = new INullWriterFactory[inputSchemas[1].getSize()];
-                    for (int j = 0; j < nullWriterFactories.length; j++) {
-                        nullWriterFactories[j] = context.getNullWriterFactory();
-                    }
-                    opDesc = new HybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(),
-                            maxInputBuildSizeInFrames, aveRecordsPerFrame, getFudgeFactor(), keysLeft, keysRight,
-                            hashFunFactories, comparatorFactories, recDescriptor, true, nullWriterFactories);
-                    break;
-                }
-                default: {
-                    throw new NotImplementedException();
-                }
+
+        boolean optimizedHashJoin = true;
+        for (IBinaryHashFunctionFamily family : hashFunFamilies) {
+            if (family == null) {
+                optimizedHashJoin = false;
+                break;
             }
-        } catch (HyracksDataException e) {
-            throw new AlgebricksException(e);
+        }
+
+        if (!optimizedHashJoin) {
+            try {
+                switch (kind) {
+                    case INNER: {
+                        opDesc = new HybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(),
+                                maxInputBuildSizeInFrames, aveRecordsPerFrame, getFudgeFactor(), keysLeft, keysRight,
+                                hashFunFactories, comparatorFactories, recDescriptor);
+                        break;
+                    }
+                    case LEFT_OUTER: {
+                        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[inputSchemas[1].getSize()];
+                        for (int j = 0; j < nullWriterFactories.length; j++) {
+                            nullWriterFactories[j] = context.getNullWriterFactory();
+                        }
+                        opDesc = new HybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(),
+                                maxInputBuildSizeInFrames, aveRecordsPerFrame, getFudgeFactor(), keysLeft, keysRight,
+                                hashFunFactories, comparatorFactories, recDescriptor, true, nullWriterFactories);
+                        break;
+                    }
+                    default: {
+                        throw new NotImplementedException();
+                    }
+                }
+            } catch (HyracksDataException e) {
+                throw new AlgebricksException(e);
+            }
+        } else {
+            try {
+                switch (kind) {
+                    case INNER: {
+                        opDesc = new OptimizedHybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(),
+                                maxInputBuildSizeInFrames, getFudgeFactor(), keysLeft, keysRight, hashFunFamilies,
+                                comparatorFactories, recDescriptor, new JoinMultiComparatorFactory(comparatorFactories,
+                                        keysLeft, keysRight), new JoinMultiComparatorFactory(comparatorFactories,
+                                        keysRight, keysLeft));
+                        break;
+                    }
+                    case LEFT_OUTER: {
+                        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[inputSchemas[1].getSize()];
+                        for (int j = 0; j < nullWriterFactories.length; j++) {
+                            nullWriterFactories[j] = context.getNullWriterFactory();
+                        }
+                        opDesc = new OptimizedHybridHashJoinOperatorDescriptor(spec, getMemSizeInFrames(),
+                                maxInputBuildSizeInFrames, getFudgeFactor(), keysLeft, keysRight, hashFunFamilies,
+                                comparatorFactories, recDescriptor, new JoinMultiComparatorFactory(comparatorFactories,
+                                        keysLeft, keysRight), new JoinMultiComparatorFactory(comparatorFactories,
+                                        keysRight, keysLeft), true, nullWriterFactories);
+                        break;
+                    }
+                    default: {
+                        throw new NotImplementedException();
+                    }
+                }
+            } catch (HyracksDataException e) {
+                throw new AlgebricksException(e);
+            }
         }
         contributeOpDesc(builder, (AbstractLogicalOperator) op, opDesc);
 
@@ -140,3 +191,72 @@
     }
 
 }
+
+/**
+ * {@link ITuplePairComparatorFactory} implementation for the optimized hybrid hash join.
+ */
+class JoinMultiComparatorFactory implements ITuplePairComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    private final IBinaryComparatorFactory[] binaryComparatorFactories;
+    private final int[] keysLeft;
+    private final int[] keysRight;
+
+    public JoinMultiComparatorFactory(IBinaryComparatorFactory[] binaryComparatorFactory, int[] keysLeft,
+            int[] keysRight) {
+        this.binaryComparatorFactories = binaryComparatorFactory;
+        this.keysLeft = keysLeft;
+        this.keysRight = keysRight;
+    }
+
+    @Override
+    public ITuplePairComparator createTuplePairComparator(IHyracksTaskContext ctx) {
+        IBinaryComparator[] binaryComparators = new IBinaryComparator[binaryComparatorFactories.length];
+        for (int i = 0; i < binaryComparators.length; i++) {
+            binaryComparators[i] = binaryComparatorFactories[i].createBinaryComparator();
+        }
+        return new JoinMultiComparator(binaryComparators, keysLeft, keysRight);
+    }
+}
+
+/**
+ * {@link ITuplePairComparator} implementation for the optimized hybrid hash join.
+ * The comparator applies multiple binary comparators, one for each key pair.
+ */
+class JoinMultiComparator implements ITuplePairComparator {
+    private final IBinaryComparator[] binaryComparators;
+    private final int[] keysLeft;
+    private final int[] keysRight;
+
+    public JoinMultiComparator(IBinaryComparator[] bComparator, int[] keysLeft, int[] keysRight) {
+        this.binaryComparators = bComparator;
+        this.keysLeft = keysLeft;
+        this.keysRight = keysRight;
+    }
+
+    @Override
+    public int compare(IFrameTupleAccessor accessor0, int tIndex0, IFrameTupleAccessor accessor1, int tIndex1) {
+        int tStart0 = accessor0.getTupleStartOffset(tIndex0);
+        int fStartOffset0 = accessor0.getFieldSlotsLength() + tStart0;
+
+        int tStart1 = accessor1.getTupleStartOffset(tIndex1);
+        int fStartOffset1 = accessor1.getFieldSlotsLength() + tStart1;
+
+        for (int i = 0; i < binaryComparators.length; i++) {
+            int fStart0 = accessor0.getFieldStartOffset(tIndex0, keysLeft[i]);
+            int fEnd0 = accessor0.getFieldEndOffset(tIndex0, keysLeft[i]);
+            int fLen0 = fEnd0 - fStart0;
+
+            int fStart1 = accessor1.getFieldStartOffset(tIndex1, keysRight[i]);
+            int fEnd1 = accessor1.getFieldEndOffset(tIndex1, keysRight[i]);
+            int fLen1 = fEnd1 - fStart1;
+
+            int c = binaryComparators[i].compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0,
+                    accessor1.getBuffer().array(), fStart1 + fStartOffset1, fLen1);
+            if (c != 0) {
+                return c;
+            }
+        }
+        return 0;
+    }
+}
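The JoinMultiComparator above compares the key pairs in order and returns the first non-zero result, i.e. a lexicographic comparison over the join keys. A self-contained sketch of that first-difference semantics, using plain int fields instead of Hyracks frame accessors (all names here are illustrative, not part of this commit):

    // Simplified model of JoinMultiComparator.compare(): key keysLeft[i] of the left tuple
    // is compared against key keysRight[i] of the right tuple; the first difference decides.
    final class MultiKeyCompareSketch {
        static int compare(int[] left, int[] right, int[] keysLeft, int[] keysRight) {
            for (int i = 0; i < keysLeft.length; i++) {
                int c = Integer.compare(left[keysLeft[i]], right[keysRight[i]]);
                if (c != 0) {
                    return c;
                }
            }
            return 0; // all key pairs equal, so the tuples join
        }

        public static void main(String[] args) {
            int[] leftTuple = { 7, 3, 9 };
            int[] rightTuple = { 3, 1, 9 };
            // Compare left fields {1, 2} with right fields {0, 2}: 3 == 3 and 9 == 9, so 0.
            System.out.println(compare(leftTuple, rightTuple, new int[] { 1, 2 }, new int[] { 0, 2 }));
        }
    }
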
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/NLJoinPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/NLJoinPOperator.java
index 8cbd2d8..d153f90 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/NLJoinPOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/NLJoinPOperator.java
@@ -44,6 +44,7 @@
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
@@ -56,8 +57,8 @@
 import edu.uci.ics.hyracks.dataflow.std.join.NestedLoopJoinOperatorDescriptor;
 
 /**
- * Left input is broadcast and preserves its local properties.
- * Right input can be partitioned in any way.
+ * Left input is broadcast and preserves its local properties. Right input can
+ * be partitioned in any way.
  */
 public class NLJoinPOperator extends AbstractJoinPOperator {
 
@@ -97,7 +98,7 @@
                 pp = pv1.getPartitioningProperty();
             }
         } else {
-        	pp = IPartitioningProperty.UNPARTITIONED;
+            pp = IPartitioningProperty.UNPARTITIONED;
         }
 
         List<ILocalStructuralProperty> localProps = new LinkedList<ILocalStructuralProperty>();
@@ -122,7 +123,8 @@
             IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
             throws AlgebricksException {
         AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op;
-        RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
+        RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
+                propagatedSchema, context);
         IOperatorSchema[] conditionInputSchemas = new IOperatorSchema[1];
         conditionInputSchemas[0] = propagatedSchema;
         IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
@@ -135,10 +137,19 @@
 
         switch (kind) {
             case INNER: {
-                opDesc = new NestedLoopJoinOperatorDescriptor(spec, comparatorFactory, recDescriptor, memSize);
+                opDesc = new NestedLoopJoinOperatorDescriptor(spec, comparatorFactory, recDescriptor, memSize, false,
+                        null);
                 break;
             }
-            case LEFT_OUTER:
+            case LEFT_OUTER: {
+                INullWriterFactory[] nullWriterFactories = new INullWriterFactory[inputSchemas[1].getSize()];
+                for (int j = 0; j < nullWriterFactories.length; j++) {
+                    nullWriterFactories[j] = context.getNullWriterFactory();
+                }
+                opDesc = new NestedLoopJoinOperatorDescriptor(spec, comparatorFactory, recDescriptor, memSize, true,
+                        nullWriterFactories);
+                break;
+            }
             default: {
                 throw new NotImplementedException();
             }
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RangePartitionPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RangePartitionPOperator.java
index 7e3c935..8875f6c 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RangePartitionPOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/RangePartitionPOperator.java
@@ -16,6 +16,7 @@
 
 import java.util.ArrayList;
 import java.util.LinkedList;
+import java.util.List;
 
 import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
 import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
@@ -69,5 +70,9 @@
             ILogicalOperator op, IOperatorSchema opSchema, JobGenContext context) throws AlgebricksException {
         throw new NotImplementedException();
     }
+    
+    public List<OrderColumn> getPartitioningFields() {
+        return partitioningFields;
+    }
 
 }
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
index a94c78e..fc0c433 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
@@ -30,8 +30,10 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
@@ -48,7 +50,6 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
@@ -164,6 +165,13 @@
     }
 
     @Override
+    public String visitDistributeResultOperator(DistributeResultOperator op, Integer indent) {
+        StringBuilder buffer = new StringBuilder();
+        addIndent(buffer, indent).append("distribute result ").append(op.getExpressions());
+        return buffer.toString();
+    }
+
+    @Override
     public String visitWriteResultOperator(WriteResultOperator op, Integer indent) {
         StringBuilder buffer = new StringBuilder();
         addIndent(buffer, indent).append("load ").append(op.getDataSource()).append(" from ")
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java
index 6b5949e..23dac2a 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java
@@ -20,8 +20,10 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
@@ -37,7 +39,6 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
@@ -97,6 +98,8 @@
 
     public R visitWriteOperator(WriteOperator op, T arg) throws AlgebricksException;
 
+    public R visitDistributeResultOperator(DistributeResultOperator op, T arg) throws AlgebricksException;
+
     public R visitWriteResultOperator(WriteResultOperator op, T arg) throws AlgebricksException;
 
     public R visitInsertDeleteOperator(InsertDeleteOperator op, T tag) throws AlgebricksException;
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenContext.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenContext.java
index 22a1a81..365d1a5 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenContext.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenContext.java
@@ -34,6 +34,7 @@
 import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspectorFactory;
 import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
 import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
 import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
 import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
 import edu.uci.ics.hyracks.algebricks.data.IPrinterFactoryProvider;
@@ -42,151 +43,167 @@
 import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
 
 public class JobGenContext {
-    private final IOperatorSchema outerFlowSchema;
-    private final Map<ILogicalOperator, IOperatorSchema> schemaMap = new HashMap<ILogicalOperator, IOperatorSchema>();
-    private final ISerializerDeserializerProvider serializerDeserializerProvider;
-    private final IBinaryHashFunctionFactoryProvider hashFunctionFactoryProvider;
-    private final IBinaryComparatorFactoryProvider comparatorFactoryProvider;
-    private final IPrinterFactoryProvider printerFactoryProvider;
-    private final ITypeTraitProvider typeTraitProvider;
-    private final IMetadataProvider<?, ?> metadataProvider;
-    private final INullWriterFactory nullWriterFactory;
-    private final INormalizedKeyComputerFactoryProvider normalizedKeyComputerFactoryProvider;
-    private final Object appContext;
-    private final IBinaryBooleanInspectorFactory booleanInspectorFactory;
-    private final IBinaryIntegerInspectorFactory integerInspectorFactory;
-    private final IExpressionRuntimeProvider expressionRuntimeProvider;
-    private final IExpressionTypeComputer expressionTypeComputer;
-    private final IExpressionEvalSizeComputer expressionEvalSizeComputer;
-    private final IPartialAggregationTypeComputer partialAggregationTypeComputer;
-    private final int frameSize;
-    private AlgebricksPartitionConstraint clusterLocations;
-    private int varCounter;
-    private final ITypingContext typingContext;
+	private final IOperatorSchema outerFlowSchema;
+	private final Map<ILogicalOperator, IOperatorSchema> schemaMap = new HashMap<ILogicalOperator, IOperatorSchema>();
+	private final ISerializerDeserializerProvider serializerDeserializerProvider;
+	private final IBinaryHashFunctionFactoryProvider hashFunctionFactoryProvider;
+	private final IBinaryHashFunctionFamilyProvider hashFunctionFamilyProvider;
+	private final IBinaryComparatorFactoryProvider comparatorFactoryProvider;
+	private final IPrinterFactoryProvider printerFactoryProvider;
+	private final ITypeTraitProvider typeTraitProvider;
+	private final IMetadataProvider<?, ?> metadataProvider;
+	private final INullWriterFactory nullWriterFactory;
+	private final INormalizedKeyComputerFactoryProvider normalizedKeyComputerFactoryProvider;
+	private final Object appContext;
+	private final IBinaryBooleanInspectorFactory booleanInspectorFactory;
+	private final IBinaryIntegerInspectorFactory integerInspectorFactory;
+	private final IExpressionRuntimeProvider expressionRuntimeProvider;
+	private final IExpressionTypeComputer expressionTypeComputer;
+	private final IExpressionEvalSizeComputer expressionEvalSizeComputer;
+	private final IPartialAggregationTypeComputer partialAggregationTypeComputer;
+	private final int frameSize;
+	private AlgebricksPartitionConstraint clusterLocations;
+	private int varCounter;
+	private final ITypingContext typingContext;
 
-    public JobGenContext(IOperatorSchema outerFlowSchema, IMetadataProvider<?, ?> metadataProvider, Object appContext,
-            ISerializerDeserializerProvider serializerDeserializerProvider,
-            IBinaryHashFunctionFactoryProvider hashFunctionFactoryProvider,
-            IBinaryComparatorFactoryProvider comparatorFactoryProvider, ITypeTraitProvider typeTraitProvider,
-            IBinaryBooleanInspectorFactory booleanInspectorFactory,
-            IBinaryIntegerInspectorFactory integerInspectorFactory, IPrinterFactoryProvider printerFactoryProvider,
-            INullWriterFactory nullWriterFactory,
-            INormalizedKeyComputerFactoryProvider normalizedKeyComputerFactoryProvider,
-            IExpressionRuntimeProvider expressionRuntimeProvider, IExpressionTypeComputer expressionTypeComputer,
-            INullableTypeComputer nullableTypeComputer, ITypingContext typingContext,
-            IExpressionEvalSizeComputer expressionEvalSizeComputer,
-            IPartialAggregationTypeComputer partialAggregationTypeComputer, int frameSize,
-            AlgebricksPartitionConstraint clusterLocations) {
-        this.outerFlowSchema = outerFlowSchema;
-        this.metadataProvider = metadataProvider;
-        this.appContext = appContext;
-        this.serializerDeserializerProvider = serializerDeserializerProvider;
-        this.hashFunctionFactoryProvider = hashFunctionFactoryProvider;
-        this.comparatorFactoryProvider = comparatorFactoryProvider;
-        this.typeTraitProvider = typeTraitProvider;
-        this.booleanInspectorFactory = booleanInspectorFactory;
-        this.integerInspectorFactory = integerInspectorFactory;
-        this.printerFactoryProvider = printerFactoryProvider;
-        this.clusterLocations = clusterLocations;
-        this.normalizedKeyComputerFactoryProvider = normalizedKeyComputerFactoryProvider;
-        this.nullWriterFactory = nullWriterFactory;
-        this.expressionRuntimeProvider = expressionRuntimeProvider;
-        this.expressionTypeComputer = expressionTypeComputer;
-        this.typingContext = typingContext;
-        this.expressionEvalSizeComputer = expressionEvalSizeComputer;
-        this.partialAggregationTypeComputer = partialAggregationTypeComputer;
-        this.frameSize = frameSize;
-        this.varCounter = 0;
-    }
+	public JobGenContext(
+			IOperatorSchema outerFlowSchema,
+			IMetadataProvider<?, ?> metadataProvider,
+			Object appContext,
+			ISerializerDeserializerProvider serializerDeserializerProvider,
+			IBinaryHashFunctionFactoryProvider hashFunctionFactoryProvider,
+			IBinaryHashFunctionFamilyProvider hashFunctionFamilyProvider,
+			IBinaryComparatorFactoryProvider comparatorFactoryProvider,
+			ITypeTraitProvider typeTraitProvider,
+			IBinaryBooleanInspectorFactory booleanInspectorFactory,
+			IBinaryIntegerInspectorFactory integerInspectorFactory,
+			IPrinterFactoryProvider printerFactoryProvider,
+			INullWriterFactory nullWriterFactory,
+			INormalizedKeyComputerFactoryProvider normalizedKeyComputerFactoryProvider,
+			IExpressionRuntimeProvider expressionRuntimeProvider,
+			IExpressionTypeComputer expressionTypeComputer,
+			INullableTypeComputer nullableTypeComputer,
+			ITypingContext typingContext,
+			IExpressionEvalSizeComputer expressionEvalSizeComputer,
+			IPartialAggregationTypeComputer partialAggregationTypeComputer,
+			int frameSize, AlgebricksPartitionConstraint clusterLocations) {
+		this.outerFlowSchema = outerFlowSchema;
+		this.metadataProvider = metadataProvider;
+		this.appContext = appContext;
+		this.serializerDeserializerProvider = serializerDeserializerProvider;
+		this.hashFunctionFactoryProvider = hashFunctionFactoryProvider;
+		this.hashFunctionFamilyProvider = hashFunctionFamilyProvider;
+		this.comparatorFactoryProvider = comparatorFactoryProvider;
+		this.typeTraitProvider = typeTraitProvider;
+		this.booleanInspectorFactory = booleanInspectorFactory;
+		this.integerInspectorFactory = integerInspectorFactory;
+		this.printerFactoryProvider = printerFactoryProvider;
+		this.clusterLocations = clusterLocations;
+		this.normalizedKeyComputerFactoryProvider = normalizedKeyComputerFactoryProvider;
+		this.nullWriterFactory = nullWriterFactory;
+		this.expressionRuntimeProvider = expressionRuntimeProvider;
+		this.expressionTypeComputer = expressionTypeComputer;
+		this.typingContext = typingContext;
+		this.expressionEvalSizeComputer = expressionEvalSizeComputer;
+		this.partialAggregationTypeComputer = partialAggregationTypeComputer;
+		this.frameSize = frameSize;
+		this.varCounter = 0;
+	}
 
-    public IOperatorSchema getOuterFlowSchema() {
-        return outerFlowSchema;
-    }
+	public IOperatorSchema getOuterFlowSchema() {
+		return outerFlowSchema;
+	}
 
-    public AlgebricksPartitionConstraint getClusterLocations() {
-        return clusterLocations;
-    }
+	public AlgebricksPartitionConstraint getClusterLocations() {
+		return clusterLocations;
+	}
 
-    public IMetadataProvider<?, ?> getMetadataProvider() {
-        return metadataProvider;
-    }
+	public IMetadataProvider<?, ?> getMetadataProvider() {
+		return metadataProvider;
+	}
 
-    public Object getAppContext() {
-        return appContext;
-    }
+	public Object getAppContext() {
+		return appContext;
+	}
 
-    public ISerializerDeserializerProvider getSerializerDeserializerProvider() {
-        return serializerDeserializerProvider;
-    }
+	public ISerializerDeserializerProvider getSerializerDeserializerProvider() {
+		return serializerDeserializerProvider;
+	}
 
-    public IBinaryHashFunctionFactoryProvider getBinaryHashFunctionFactoryProvider() {
-        return hashFunctionFactoryProvider;
-    }
+	public IBinaryHashFunctionFactoryProvider getBinaryHashFunctionFactoryProvider() {
+		return hashFunctionFactoryProvider;
+	}
 
-    public IBinaryComparatorFactoryProvider getBinaryComparatorFactoryProvider() {
-        return comparatorFactoryProvider;
-    }
+	public IBinaryHashFunctionFamilyProvider getBinaryHashFunctionFamilyProvider() {
+		return hashFunctionFamilyProvider;
+	}
 
-    public ITypeTraitProvider getTypeTraitProvider() {
-        return typeTraitProvider;
-    }
+	public IBinaryComparatorFactoryProvider getBinaryComparatorFactoryProvider() {
+		return comparatorFactoryProvider;
+	}
 
-    public IBinaryBooleanInspectorFactory getBinaryBooleanInspectorFactory() {
-        return booleanInspectorFactory;
-    }
+	public ITypeTraitProvider getTypeTraitProvider() {
+		return typeTraitProvider;
+	}
 
-    public IBinaryIntegerInspectorFactory getBinaryIntegerInspectorFactory() {
-        return integerInspectorFactory;
-    }
+	public IBinaryBooleanInspectorFactory getBinaryBooleanInspectorFactory() {
+		return booleanInspectorFactory;
+	}
 
-    public IPrinterFactoryProvider getPrinterFactoryProvider() {
-        return printerFactoryProvider;
-    }
+	public IBinaryIntegerInspectorFactory getBinaryIntegerInspectorFactory() {
+		return integerInspectorFactory;
+	}
 
-    public IExpressionRuntimeProvider getExpressionRuntimeProvider() {
-        return expressionRuntimeProvider;
-    }
+	public IPrinterFactoryProvider getPrinterFactoryProvider() {
+		return printerFactoryProvider;
+	}
 
-    public IOperatorSchema getSchema(ILogicalOperator op) {
-        return schemaMap.get(op);
-    }
+	public IExpressionRuntimeProvider getExpressionRuntimeProvider() {
+		return expressionRuntimeProvider;
+	}
 
-    public void putSchema(ILogicalOperator op, IOperatorSchema schema) {
-        schemaMap.put(op, schema);
-    }
+	public IOperatorSchema getSchema(ILogicalOperator op) {
+		return schemaMap.get(op);
+	}
 
-    public LogicalVariable createNewVar() {
-        varCounter++;
-        LogicalVariable var = new LogicalVariable(-varCounter);
-        return var;
-    }
+	public void putSchema(ILogicalOperator op, IOperatorSchema schema) {
+		schemaMap.put(op, schema);
+	}
 
-    public Object getType(ILogicalExpression expr, IVariableTypeEnvironment env) throws AlgebricksException {
-        return expressionTypeComputer.getType(expr, typingContext.getMetadataProvider(), env);
-    }
+	public LogicalVariable createNewVar() {
+		varCounter++;
+		LogicalVariable var = new LogicalVariable(-varCounter);
+		return var;
+	}
 
-    public INullWriterFactory getNullWriterFactory() {
-        return nullWriterFactory;
-    }
+	public Object getType(ILogicalExpression expr, IVariableTypeEnvironment env)
+			throws AlgebricksException {
+		return expressionTypeComputer.getType(expr,
+				typingContext.getMetadataProvider(), env);
+	}
 
-    public INormalizedKeyComputerFactoryProvider getNormalizedKeyComputerFactoryProvider() {
-        return normalizedKeyComputerFactoryProvider;
-    }
+	public INullWriterFactory getNullWriterFactory() {
+		return nullWriterFactory;
+	}
 
-    public IExpressionEvalSizeComputer getExpressionEvalSizeComputer() {
-        return expressionEvalSizeComputer;
-    }
+	public INormalizedKeyComputerFactoryProvider getNormalizedKeyComputerFactoryProvider() {
+		return normalizedKeyComputerFactoryProvider;
+	}
 
-    public int getFrameSize() {
-        return frameSize;
-    }
+	public IExpressionEvalSizeComputer getExpressionEvalSizeComputer() {
+		return expressionEvalSizeComputer;
+	}
 
-    public IPartialAggregationTypeComputer getPartialAggregationTypeComputer() {
-        return partialAggregationTypeComputer;
-    }
+	public int getFrameSize() {
+		return frameSize;
+	}
 
-    public IVariableTypeEnvironment getTypeEnvironment(ILogicalOperator op) {
-        return typingContext.getOutputTypeEnvironment(op);
-    }
+	public IPartialAggregationTypeComputer getPartialAggregationTypeComputer() {
+		return partialAggregationTypeComputer;
+	}
+
+	public IVariableTypeEnvironment getTypeEnvironment(ILogicalOperator op) {
+		return typingContext.getOutputTypeEnvironment(op);
+	}
 
 }
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
index 790fb93..530d19c 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
@@ -24,6 +24,7 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
 import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
 import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
 import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
 import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
 import edu.uci.ics.hyracks.algebricks.data.IPrinterFactoryProvider;
@@ -31,6 +32,7 @@
 import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
 import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
@@ -42,8 +44,8 @@
 
     @SuppressWarnings("rawtypes")
     public static RecordDescriptor mkRecordDescriptor(IVariableTypeEnvironment env, IOperatorSchema opSchema,
-            JobGenContext context) throws AlgebricksException {        
-		ISerializerDeserializer[] fields = new ISerializerDeserializer[opSchema.getSize()];
+            JobGenContext context) throws AlgebricksException {
+        ISerializerDeserializer[] fields = new ISerializerDeserializer[opSchema.getSize()];
         ITypeTraits[] typeTraits = new ITypeTraits[opSchema.getSize()];
         ISerializerDeserializerProvider sdp = context.getSerializerDeserializerProvider();
         ITypeTraitProvider ttp = context.getTypeTraitProvider();
@@ -59,7 +61,7 @@
         }
         return new RecordDescriptor(fields, typeTraits);
     }
-    
+
     public static IPrinterFactory[] mkPrinterFactories(IOperatorSchema opSchema, IVariableTypeEnvironment env,
             JobGenContext context, int[] printColumns) throws AlgebricksException {
         IPrinterFactory[] pf = new IPrinterFactory[printColumns.length];
@@ -94,7 +96,20 @@
         }
         return funFactories;
     }
-    
+
+    public static IBinaryHashFunctionFamily[] variablesToBinaryHashFunctionFamilies(
+            Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
+            throws AlgebricksException {
+        IBinaryHashFunctionFamily[] funFamilies = new IBinaryHashFunctionFamily[varLogical.size()];
+        int i = 0;
+        IBinaryHashFunctionFamilyProvider bhffProvider = context.getBinaryHashFunctionFamilyProvider();
+        for (LogicalVariable var : varLogical) {
+            Object type = env.getVarType(var);
+            funFamilies[i++] = bhffProvider.getBinaryHashFunctionFamily(type);
+        }
+        return funFamilies;
+    }
+
     public static IBinaryComparatorFactory[] variablesToAscBinaryComparatorFactories(
             Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
             throws AlgebricksException {
@@ -107,15 +122,14 @@
         }
         return compFactories;
     }
-    
-    public static IBinaryComparatorFactory[] variablesToAscBinaryComparatorFactories(
-            List<LogicalVariable> varLogical, int start, int size, IVariableTypeEnvironment env, JobGenContext context)
-            throws AlgebricksException {
+
+    public static IBinaryComparatorFactory[] variablesToAscBinaryComparatorFactories(List<LogicalVariable> varLogical,
+            int start, int size, IVariableTypeEnvironment env, JobGenContext context) throws AlgebricksException {
         IBinaryComparatorFactory[] compFactories = new IBinaryComparatorFactory[size];
         IBinaryComparatorFactoryProvider bcfProvider = context.getBinaryComparatorFactoryProvider();
         for (int i = 0; i < size; i++) {
-                Object type = env.getVarType(varLogical.get(start + i));
-                compFactories[i] = bcfProvider.getBinaryComparatorFactory(type, true);
+            Object type = env.getVarType(varLogical.get(start + i));
+            compFactories[i] = bcfProvider.getBinaryComparatorFactory(type, true);
         }
         return compFactories;
     }
@@ -144,15 +158,14 @@
         }
         return typeTraits;
     }
-    
-    public static ITypeTraits[] variablesToTypeTraits(
-            List<LogicalVariable> varLogical, int start, int size, IVariableTypeEnvironment env, JobGenContext context)
-            throws AlgebricksException {
+
+    public static ITypeTraits[] variablesToTypeTraits(List<LogicalVariable> varLogical, int start, int size,
+            IVariableTypeEnvironment env, JobGenContext context) throws AlgebricksException {
         ITypeTraits[] typeTraits = new ITypeTraits[size];
         ITypeTraitProvider typeTraitProvider = context.getTypeTraitProvider();
         for (int i = 0; i < size; i++) {
-                Object type = env.getVarType(varLogical.get(start + i));
-                typeTraits[i] = typeTraitProvider.getTypeTrait(type);
+            Object type = env.getVarType(varLogical.get(start + i));
+            typeTraits[i] = typeTraitProvider.getTypeTrait(type);
         }
         return typeTraits;
     }
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
index bbe8fb3..63a6852 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
@@ -54,6 +54,8 @@
         operatorVisitedToParents.clear();
         builder.buildSpec(rootOps);
         spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        // Do not do activity cluster planning because it is slow on large clusters
+        spec.setUseConnectorPolicyForScheduling(false);
         return spec;
     }
 
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/AlgebricksOptimizationContext.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/AlgebricksOptimizationContext.java
index 7c63e01..738fc7f 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/AlgebricksOptimizationContext.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/AlgebricksOptimizationContext.java
@@ -135,6 +135,7 @@
     /*
      * returns true if op1 and op2 have already been compared
      */
+    @Override
     public boolean checkAndAddToAlreadyCompared(ILogicalOperator op1, ILogicalOperator op2) {
         HashSet<ILogicalOperator> ops = alreadyCompared.get(op1);
         if (ops == null) {
@@ -151,6 +152,11 @@
             }
         }
     }
+    
+    @Override
+    public void removeFromAlreadyCompared(ILogicalOperator op1) {
+        alreadyCompared.remove(op1);
+    }
 
     public void addNotToBeInlinedVar(LogicalVariable var) {
         notToBeInlinedVars.add(var);
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java
index 9ce910b..fc6c198 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/rewriter/base/PhysicalOptimizationConfig.java
@@ -17,8 +17,8 @@
     public PhysicalOptimizationConfig() {
         int frameSize = 32768;
         setInt(FRAMESIZE, frameSize);
-        setInt(MAX_FRAMES_EXTERNAL_SORT, (int) (((long) 512 * MB) / frameSize));
-        setInt(MAX_FRAMES_EXTERNAL_GROUP_BY, (int) (((long) 256 * MB) / frameSize));
+        setInt(MAX_FRAMES_EXTERNAL_SORT, (int) (((long) 32 * MB) / frameSize));
+        setInt(MAX_FRAMES_EXTERNAL_GROUP_BY, (int) (((long) 32 * MB) / frameSize));
 
         // use http://www.rsok.com/~jrm/printprimes.html to find prime numbers
         setInt(DEFAULT_HASH_GROUP_TABLE_SIZE, 10485767);
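Assuming the MB constant in this class is 1024 * 1024, the change shrinks the external-sort budget from 16384 frames (512 MB at the default 32768-byte frame) to 1024 frames (32 MB), and the external-group-by budget from 8192 frames to the same 1024. A quick check of the arithmetic:

    // Frame budgets implied by the constants above (frameSize = 32768, MB assumed 1 << 20).
    public class FrameBudgetCheck {
        public static void main(String[] args) {
            final long MB = 1024 * 1024;
            final int frameSize = 32768;
            System.out.println((512 * MB) / frameSize); // old MAX_FRAMES_EXTERNAL_SORT: 16384
            System.out.println((256 * MB) / frameSize); // old MAX_FRAMES_EXTERNAL_GROUP_BY: 8192
            System.out.println((32 * MB) / frameSize);  // new value for both: 1024
        }
    }
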
diff --git a/algebricks/algebricks-data/pom.xml b/algebricks/algebricks-data/pom.xml
index 3d927d9..9536416 100644
--- a/algebricks/algebricks-data/pom.xml
+++ b/algebricks/algebricks-data/pom.xml
@@ -16,8 +16,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
new file mode 100644
index 0000000..8a992b3
--- /dev/null
+++ b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.algebricks.data;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+
+public interface IBinaryHashFunctionFamilyProvider {
+
+	public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type)
+			throws AlgebricksException;
+}
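A minimal sketch of an implementation, assuming families are registered per type object up front; the class name is hypothetical and not part of this commit. Note that HybridHashJoinPOperator above treats a null family as unavailable and falls back to the non-optimized hybrid hash join, so returning null is a valid way to opt a type out:

    import java.util.Map;

    import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
    import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
    import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;

    public class MapBasedHashFunctionFamilyProvider implements IBinaryHashFunctionFamilyProvider {
        private final Map<Object, IBinaryHashFunctionFamily> families;

        public MapBasedHashFunctionFamilyProvider(Map<Object, IBinaryHashFunctionFamily> families) {
            this.families = families;
        }

        @Override
        public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type) throws AlgebricksException {
            // A null entry propagates into JobGenHelper.variablesToBinaryHashFunctionFamilies(),
            // which disables the optimized join path for plans keyed on this type.
            return families.get(type);
        }
    }
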
diff --git a/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IResultSerializerFactoryProvider.java b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IResultSerializerFactoryProvider.java
new file mode 100644
index 0000000..5e62612
--- /dev/null
+++ b/algebricks/algebricks-data/src/main/java/edu/uci/ics/hyracks/algebricks/data/IResultSerializerFactoryProvider.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.data;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IResultSerializerFactory;
+
+public interface IResultSerializerFactoryProvider extends Serializable {
+    /**
+     * Returns a result serializer factory.
+     * 
+     * @param fields
+     *            - The positions of the fields in the order they should be written to the output.
+     * @param printerFactories
+     *            - A printer factory array to print the tuple's fields.
+     * @param writerFactory
+     *            - A writer factory to write the serialized data to the print stream.
+     * @return A new instance of a result serializer factory.
+     */
+    public IResultSerializerFactory getAqlResultSerializerFactoryProvider(int[] fields,
+            IPrinterFactory[] printerFactories, IAWriterFactory writerFactory);
+}
diff --git a/algebricks/algebricks-examples/piglet-example/pom.xml b/algebricks/algebricks-examples/piglet-example/pom.xml
index 954938a..ca7467b 100644
--- a/algebricks/algebricks-examples/piglet-example/pom.xml
+++ b/algebricks/algebricks-examples/piglet-example/pom.xml
@@ -16,8 +16,9 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
+					<fork>true</fork>
 				</configuration>
 			</plugin>
 			<plugin>
diff --git a/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/PigletCompiler.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/PigletCompiler.java
index d105759..2981157 100644
--- a/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/PigletCompiler.java
+++ b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/compiler/PigletCompiler.java
@@ -127,9 +127,9 @@
             }
         });
         builder.setTypeTraitProvider(new ITypeTraitProvider() {
-			public ITypeTraits getTypeTrait(Object type) {
-				return null;
-			}
+            public ITypeTraits getTypeTrait(Object type) {
+                return null;
+            }
         });
         builder.setPrinterProvider(PigletPrinterFactoryProvider.INSTANCE);
         builder.setExpressionRuntimeProvider(new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter(
diff --git a/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletMetadataProvider.java b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletMetadataProvider.java
index d678803..15b290e 100644
--- a/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletMetadataProvider.java
+++ b/algebricks/algebricks-examples/piglet-example/src/main/java/edu/uci/ics/hyracks/algebricks/examples/piglet/metadata/PigletMetadataProvider.java
@@ -145,6 +145,13 @@
     }
 
     @Override
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getResultHandleRuntime(IDataSink sink,
+            int[] printColumns, IPrinterFactory[] printerFactories, RecordDescriptor inputDesc, boolean ordered,
+            JobSpecification spec) throws AlgebricksException {
+        return null;
+    }
+
+    @Override
     public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getWriteResultRuntime(
             IDataSource<String> dataSource, IOperatorSchema propagatedSchema, List<LogicalVariable> keys,
             LogicalVariable payLoadVar, JobGenContext context, JobSpecification jobSpec) throws AlgebricksException {
diff --git a/algebricks/algebricks-rewriter/pom.xml b/algebricks/algebricks-rewriter/pom.xml
index 41979d3..7968773 100644
--- a/algebricks/algebricks-rewriter/pom.xml
+++ b/algebricks/algebricks-rewriter/pom.xml
@@ -16,8 +16,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexJoinInferenceRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexJoinInferenceRule.java
index 4f6699a..3daf85d 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexJoinInferenceRule.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexJoinInferenceRule.java
@@ -69,7 +69,7 @@
         VariableUtilities.getUsedVariables(op, varsUsedInUnnest);
 
         HashSet<LogicalVariable> producedInSubplan = new HashSet<LogicalVariable>();
-        VariableUtilities.getLiveVariables(subplan, producedInSubplan);
+        VariableUtilities.getProducedVariables(subplan, producedInSubplan);
 
         if (!producedInSubplan.containsAll(varsUsedInUnnest)) {
             return false;
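
The switch from live to produced variables matters because a subplan's live variables include everything propagated up from its input, whereas its produced variables are only the ones the subplan itself introduces; requiring the latter stops pass-through variables from making the subplan look as if it computes the unnest's inputs. A small sketch of the contrast (reusing the names and imports already in this method; the example sets are hypothetical):

    // Sketch: the two sets for the same subplan operator can differ.
    HashSet<LogicalVariable> live = new HashSet<LogicalVariable>();
    VariableUtilities.getLiveVariables(subplan, live);         // e.g., {$$0, $$5}: includes vars flowing in from below
    HashSet<LogicalVariable> produced = new HashSet<LogicalVariable>();
    VariableUtilities.getProducedVariables(subplan, produced); // e.g., {$$5}: only vars the subplan creates
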
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java
index 4521d1a..fa5000e 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java
@@ -149,9 +149,15 @@
             // Trivially joinable.
             return true;
         }
-        if (!belowSecondUnnest && op.getOperatorTag() == LogicalOperatorTag.SUBPLAN) {
-            // Bail on subplan.
-            return false;
+        if (!belowSecondUnnest) {
+            // Bail on the following operators.
+            switch (op.getOperatorTag()) {
+                case AGGREGATE:
+                case SUBPLAN:
+                case GROUP:
+                case UNNEST_MAP:
+                    return false;
+            }
         }
         switch (op.getOperatorTag()) {
             case UNNEST:
@@ -211,7 +217,8 @@
                 for (LogicalVariable producedVar : producedVars) {
                     if (outerUsedVars.contains(producedVar)) {
                         outerMatches++;
-                    } else if (innerUsedVars.contains(producedVar)) {
+                    }
+                    if (innerUsedVars.contains(producedVar)) {
                         innerMatches++;
                     }
                 }
@@ -221,24 +228,30 @@
                     // All produced vars used by outer partition.
                     outerOps.add(op);
                     targetUsedVars = outerUsedVars;
-                } else if (innerMatches == producedVars.size() && !producedVars.isEmpty()) {
+                }
+                if (innerMatches == producedVars.size() && !producedVars.isEmpty()) {
                     // All produced vars used by inner partition.
                     innerOps.add(op);
                     targetUsedVars = innerUsedVars;
-                } else if (innerMatches == 0 && outerMatches == 0) {
+                }
+                if (innerMatches == 0 && outerMatches == 0) {
                     // Op produces variables that are not used in the part of the plan we've seen (or it doesn't produce any vars).
                     // Try to figure out where it belongs by analyzing the used variables.
                     List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
                     VariableUtilities.getUsedVariables(op, usedVars);
                     for (LogicalVariable usedVar : usedVars) {
+                        boolean canBreak = false;
                         if (outerUsedVars.contains(usedVar)) {
                             outerOps.add(op);
                             targetUsedVars = outerUsedVars;
-                            break;
+                            canBreak = true;
                         }
                         if (innerUsedVars.contains(usedVar)) {
                             innerOps.add(op);
                             targetUsedVars = innerUsedVars;
+                            canBreak = true;
+                        }
+                        if (canBreak) {
                             break;
                         }
                     }
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java
new file mode 100644
index 0000000..f017e0f
--- /dev/null
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java
@@ -0,0 +1,439 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.rewriter.rules;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Factors out common sub-expressions by assigning them to new variables, and replacing the common sub-expressions with references to those variables.
+ *
+ * Preconditions/Assumptions:
+ * Assumes no projects are in the plan. This rule ignores variable reference expressions and constants (other rules deal with those separately).
+ * 
+ * Postconditions/Examples:
+ * Plan with extracted sub-expressions. Generates one assign operator per extracted expression.
+ * 
+ * Example 1 - Simple Arithmetic Example (simplified)
+ * 
+ * Before plan:
+ * assign [$$1] <- [5 + 6 - 10]
+ *   assign [$$0] <- [5 + 6 + 30]
+ * 
+ * After plan:
+ * assign [$$1] <- [$$5 - 10]
+ *   assign [$$0] <- [$$5 + 30]
+ *     assign [$$5] <- [5 + 6]
+ * 
+ * Example 2 - Cleaning up 'Distinct By' (simplified)
+ * 
+ * Before plan: (notice how $$0 is not live after the distinct)
+ * assign [$$3] <- [field-access($$0, 1)]
+ *   distinct ([%0->$$5])
+ *     assign [$$5] <- [field-access($$0, 1)]
+ *       unnest $$0 <- [scan-dataset]
+ * 
+ * After plan: (notice how $$0 is no longer referenced above the distinct)
+ * assign [$$3] <- [$$5]
+ *   distinct ([$$5])
+ *     assign [$$5] <- [field-access($$0, 1)]
+ *       unnest $$0 <- [scan-dataset]
+ * 
+ * Example 3 - Pulling Common Expressions Above Joins (simplified)
+ * 
+ * Before plan:
+ * assign [$$9] <- funcZ(funcY($$8))
+ *   join (funcX(funcY($$8)))
+ * 
+ * After plan:
+ * assign [$$9] <- funcZ($$10)
+ *   select (funcX($$10))
+ *     assign [$$10] <- [funcY($$8)]
+ *       join (TRUE)
+ */
+public class ExtractCommonExpressionsRule implements IAlgebraicRewriteRule {
+
+    private final List<ILogicalExpression> originalAssignExprs = new ArrayList<ILogicalExpression>();
+    
+    private final CommonExpressionSubstitutionVisitor substVisitor = new CommonExpressionSubstitutionVisitor();
+    private final Map<ILogicalExpression, ExprEquivalenceClass> exprEqClassMap = new HashMap<ILogicalExpression, ExprEquivalenceClass>();
+    
+    // Set of operators for which common subexpression elimination should not be performed.
+    private static final Set<LogicalOperatorTag> ignoreOps = new HashSet<LogicalOperatorTag>();
+    static {
+        ignoreOps.add(LogicalOperatorTag.UNNEST);
+        ignoreOps.add(LogicalOperatorTag.UNNEST_MAP);
+        ignoreOps.add(LogicalOperatorTag.ORDER);
+        ignoreOps.add(LogicalOperatorTag.PROJECT);
+        ignoreOps.add(LogicalOperatorTag.AGGREGATE);
+        ignoreOps.add(LogicalOperatorTag.RUNNINGAGGREGATE);
+    }
+    
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        exprEqClassMap.clear();
+        substVisitor.setContext(context);
+        boolean modified = removeCommonExpressions(opRef, context);
+        if (modified) {
+            context.computeAndSetTypeEnvironmentForOperator(opRef.getValue());
+        }
+        return modified;
+    }
+
+    private void updateEquivalenceClassMap(LogicalVariable lhs, Mutable<ILogicalExpression> rhsExprRef, ILogicalExpression rhsExpr, ILogicalOperator op) {
+        ExprEquivalenceClass exprEqClass = exprEqClassMap.get(rhsExpr);
+        if (exprEqClass == null) {
+            exprEqClass = new ExprEquivalenceClass(op, rhsExprRef);
+            exprEqClassMap.put(rhsExpr, exprEqClass);
+        }
+        exprEqClass.setVariable(lhs);
+    }
+
+    private boolean removeCommonExpressions(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (context.checkIfInDontApplySet(this, opRef.getValue())) {
+            return false;
+        }
+        
+        boolean modified = false;
+        // Recurse into children.
+        for (Mutable<ILogicalOperator> inputOpRef : op.getInputs()) {
+            if (removeCommonExpressions(inputOpRef, context)) {
+                modified = true;
+            }
+        }
+        
+        // TODO: Deal with replicate properly. Currently, we just clear the expr equivalence map, since we want to avoid incorrect expression replacement
+        // (any new variables would have to be assigned below the replicate to remain live at all of its outputs).
+        if (op.getOperatorTag() == LogicalOperatorTag.REPLICATE) {
+            exprEqClassMap.clear();
+            return modified;
+        }
+        // Exclude these operators.
+        if (ignoreOps.contains(op.getOperatorTag())) {
+            return modified;
+        }
+        
+        // Remember a copy of the original assign expressions, so we can add them to the equivalence class map
+        // after replacing expressions within the assign operator itself.
+        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+            AssignOperator assignOp = (AssignOperator) op;
+            originalAssignExprs.clear();
+            int numVars = assignOp.getVariables().size();
+            for (int i = 0; i < numVars; i++) {
+                Mutable<ILogicalExpression> exprRef = assignOp.getExpressions().get(i);
+                ILogicalExpression expr = exprRef.getValue();
+                originalAssignExprs.add(expr.cloneExpression());
+            }
+        }
+        
+        // Perform common subexpression elimination.
+        substVisitor.setOperator(op);
+        if (op.acceptExpressionTransform(substVisitor)) {
+            modified = true;
+        }
+        
+        // Update equivalence class map.
+        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+            AssignOperator assignOp = (AssignOperator) op;
+            int numVars = assignOp.getVariables().size();
+            for (int i = 0; i < numVars; i++) {
+                Mutable<ILogicalExpression> exprRef = assignOp.getExpressions().get(i);
+                ILogicalExpression expr = exprRef.getValue();
+                if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE
+                        || expr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+                    continue;
+                }
+                // Update equivalence class map.
+                LogicalVariable lhs = assignOp.getVariables().get(i);
+                updateEquivalenceClassMap(lhs, exprRef, exprRef.getValue(), op);
+                
+                // Update equivalence class map with original assign expression.
+                updateEquivalenceClassMap(lhs, exprRef, originalAssignExprs.get(i), op);
+            }
+        }
+
+        // TODO: For now do not perform replacement in nested plans
+        // due to the complication of figuring out whether the firstOp in an equivalence class is within a subplan,
+        // in which case the resulting variable would not be visible to the outside.
+        // Since subplans should be eliminated in most cases, this behavior is acceptable for now.
+        /*
+        if (op.hasNestedPlans()) {
+            AbstractOperatorWithNestedPlans opWithNestedPlan = (AbstractOperatorWithNestedPlans) op;
+            for (ILogicalPlan nestedPlan : opWithNestedPlan.getNestedPlans()) {
+                for (Mutable<ILogicalOperator> rootRef : nestedPlan.getRoots()) {
+                    if (removeCommonExpressions(rootRef, context)) {
+                        modified = true;
+                    }
+                }
+            }
+        }
+        */
+
+        if (modified) {
+            context.computeAndSetTypeEnvironmentForOperator(op);
+            context.addToDontApplySet(this, op);
+        }
+        return modified;
+    }
+
+    private class CommonExpressionSubstitutionVisitor implements ILogicalExpressionReferenceTransform {
+                
+        private final Set<LogicalVariable> liveVars = new HashSet<LogicalVariable>();
+        private final List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
+        private IOptimizationContext context;
+        private ILogicalOperator op;        
+        
+        public void setContext(IOptimizationContext context) {
+            this.context = context;
+        }
+        
+        public void setOperator(ILogicalOperator op) throws AlgebricksException {
+            this.op = op;
+            liveVars.clear();
+            usedVars.clear();
+        }
+        
+        @Override
+        public boolean transform(Mutable<ILogicalExpression> exprRef) throws AlgebricksException {
+            if (liveVars.isEmpty() && usedVars.isEmpty()) {
+                VariableUtilities.getLiveVariables(op, liveVars);
+                VariableUtilities.getUsedVariables(op, usedVars);
+            }
+            
+            AbstractLogicalExpression expr = (AbstractLogicalExpression) exprRef.getValue();
+            boolean modified = false;
+            ExprEquivalenceClass exprEqClass = exprEqClassMap.get(expr);
+            if (exprEqClass != null) {
+                // Replace common subexpression with existing variable. 
+                if (exprEqClass.variableIsSet()) {
+                    // Check if the replacing variable is live at this op.
+                    // However, if the op is already using variables that are not live, then a replacement may enable fixing the plan.
+                    // This behavior is necessary to, e.g., properly deal with distinct by.
+                    // Also just replace the expr if we are replacing common exprs from within the same operator.
+                    if (liveVars.contains(exprEqClass.getVariable()) || !liveVars.containsAll(usedVars)
+                            || op == exprEqClass.getFirstOperator()) {
+                        exprRef.setValue(new VariableReferenceExpression(exprEqClass.getVariable()));
+                        // Do not descend into children since this expr has been completely replaced.
+                        return true;
+                    }
+                } else {
+                    if (assignCommonExpression(exprEqClass, expr)) {
+                        exprRef.setValue(new VariableReferenceExpression(exprEqClass.getVariable()));
+                        // Do not descend into children since this expr has been completely replaced.
+                        return true;
+                    }
+                }
+            } else {
+                if (expr.getExpressionTag() != LogicalExpressionTag.VARIABLE
+                        && expr.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+                    exprEqClass = new ExprEquivalenceClass(op, exprRef);
+                    exprEqClassMap.put(expr, exprEqClass);
+                }
+            }
+            
+            // Descend into function arguments.
+            if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+                for (Mutable<ILogicalExpression> arg : funcExpr.getArguments()) {
+                    if (transform(arg)) {
+                        modified = true;
+                    }
+                }
+            }
+            return modified;
+        }
+        
+        private boolean assignCommonExpression(ExprEquivalenceClass exprEqClass, ILogicalExpression expr) throws AlgebricksException {
+            AbstractLogicalOperator firstOp = (AbstractLogicalOperator) exprEqClass.getFirstOperator();
+            Mutable<ILogicalExpression> firstExprRef = exprEqClass.getFirstExpression();
+            if (firstOp.getOperatorTag() == LogicalOperatorTag.INNERJOIN || firstOp.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
+                // Do not extract common expressions from within the same join operator.
+                if (firstOp == op) {
+                    return false;
+                }
+                AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) firstOp;
+                Mutable<ILogicalExpression> joinCond = joinOp.getCondition();                
+                ILogicalExpression enclosingExpr = getEnclosingExpression(joinCond, firstExprRef.getValue());
+                if (enclosingExpr == null) {
+                    // No viable enclosing expression that we can pull out from the join.
+                    return false;
+                }
+                // Place a Select operator beneath op that contains the enclosing expression.
+                SelectOperator selectOp = new SelectOperator(new MutableObject<ILogicalExpression>(enclosingExpr));
+                selectOp.getInputs().add(new MutableObject<ILogicalOperator>(op.getInputs().get(0).getValue()));
+                op.getInputs().get(0).setValue(selectOp);
+                // Set firstOp to be the select below op, since we want to assign the common subexpr there.
+                firstOp = (AbstractLogicalOperator) selectOp;
+            } else if (firstOp.getInputs().size() > 1) { 
+                // Bail for any non-join operator with multiple inputs.
+                return false;
+            }                        
+            LogicalVariable newVar = context.newVar();
+            AssignOperator newAssign = new AssignOperator(newVar, new MutableObject<ILogicalExpression>(firstExprRef.getValue().cloneExpression()));            
+            // Place assign below firstOp.
+            newAssign.getInputs().add(new MutableObject<ILogicalOperator>(firstOp.getInputs().get(0).getValue()));
+            newAssign.setExecutionMode(firstOp.getExecutionMode());
+            firstOp.getInputs().get(0).setValue(newAssign);
+            // Replace original expr with variable reference, and set var in expression equivalence class.
+            firstExprRef.setValue(new VariableReferenceExpression(newVar));
+            exprEqClass.setVariable(newVar);
+            context.computeAndSetTypeEnvironmentForOperator(newAssign);
+            context.computeAndSetTypeEnvironmentForOperator(firstOp);
+            return true;
+        }
+
+        private ILogicalExpression getEnclosingExpression(Mutable<ILogicalExpression> conditionExprRef, ILogicalExpression commonSubExpr) {
+            ILogicalExpression conditionExpr = conditionExprRef.getValue();
+            if (conditionExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+                return null;
+            }
+            if (isEqJoinCondition(commonSubExpr)) {
+                // Do not eliminate the common expression if we could use it for an equi-join.
+                return null;
+            }
+            AbstractFunctionCallExpression conditionFuncExpr = (AbstractFunctionCallExpression) conditionExpr;
+            // Boolean expression that encloses the common subexpression.
+            ILogicalExpression enclosingBoolExpr = null;
+            // We are not dealing with arbitrarily nested and/or expressions here.
+            FunctionIdentifier funcIdent = conditionFuncExpr.getFunctionIdentifier();
+            if (funcIdent.equals(AlgebricksBuiltinFunctions.AND) || funcIdent.equals(AlgebricksBuiltinFunctions.OR)) {
+                Iterator<Mutable<ILogicalExpression>> argIter = conditionFuncExpr.getArguments().iterator();
+                while (argIter.hasNext()) {
+                    Mutable<ILogicalExpression> argRef = argIter.next();
+                    if (containsExpr(argRef.getValue(), commonSubExpr)) {
+                        enclosingBoolExpr = argRef.getValue();
+                        // Remove the enclosing expression from the argument list.
+                        // We are going to pull it out into a new select operator.
+                        argIter.remove();
+                        break;
+                    }
+                }
+                // If and/or only has a single argument left, pull it out and remove the and/or function.
+                if (conditionFuncExpr.getArguments().size() == 1) {
+                    conditionExprRef.setValue(conditionFuncExpr.getArguments().get(0).getValue());
+                }
+            } else {
+                if (!containsExpr(conditionExprRef.getValue(), commonSubExpr)) {
+                    return null;
+                }
+                enclosingBoolExpr = conditionFuncExpr;
+                // Replace the enclosing expression with TRUE.
+                conditionExprRef.setValue(ConstantExpression.TRUE);
+            }
+            return enclosingBoolExpr;
+        }
+    }
+    
+    private boolean containsExpr(ILogicalExpression expr, ILogicalExpression searchExpr) {
+        if (expr == searchExpr) {
+            return true;
+        }
+        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+        for (Mutable<ILogicalExpression> argRef : funcExpr.getArguments()) {
+            if (containsExpr(argRef.getValue(), searchExpr)) {
+                return true;
+            }
+        }
+        return false;
+    }
+    
+    private boolean isEqJoinCondition(ILogicalExpression expr) {
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+        if (funcExpr.getFunctionIdentifier().equals(AlgebricksBuiltinFunctions.EQ)) {
+            ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
+            ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
+            if (arg1.getExpressionTag() == LogicalExpressionTag.VARIABLE
+                    && arg2.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                return true;
+            }
+        }
+        return false;
+    }
+    
+    private final class ExprEquivalenceClass {
+        // First operator in which expression is used.
+        private final ILogicalOperator firstOp;
+        
+        // Reference to expression in first op.
+        private final Mutable<ILogicalExpression> firstExprRef;
+        
+        // Variable that this expression has been assigned to.
+        private LogicalVariable var;
+        
+        public ExprEquivalenceClass(ILogicalOperator firstOp, Mutable<ILogicalExpression> firstExprRef) {
+            this.firstOp = firstOp;
+            this.firstExprRef = firstExprRef;
+        }
+        
+        public ILogicalOperator getFirstOperator() {
+            return firstOp;
+        }
+        
+        public Mutable<ILogicalExpression> getFirstExpression() {
+            return firstExprRef;
+        }
+        
+        public void setVariable(LogicalVariable var) {
+            this.var = var;
+        }
+        
+        public LogicalVariable getVariable() {
+            return var;
+        }
+        
+        public boolean variableIsSet() {
+            return var != null;
+        }
+    }
+}
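
Like other Algebricks rewrite rules, this one takes effect only once it is added to a rule collection handed to the optimizer; a minimal sketch (list name hypothetical, standard java.util imports assumed):

    List<IAlgebraicRewriteRule> normalizationRules = new ArrayList<IAlgebraicRewriteRule>();
    normalizationRules.add(new ExtractCommonExpressionsRule());
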
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineSingleReferenceVariablesRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineSingleReferenceVariablesRule.java
new file mode 100644
index 0000000..df8ddda
--- /dev/null
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineSingleReferenceVariablesRule.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.rewriter.rules;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+
+/**
+ * Inlines variables that are referenced exactly once.
+ * 
+ * Preconditions/Assumptions:
+ * Assumes no projects are in the plan.
+ * 
+ * Example assuming variable $$3 is referenced exactly once.
+ * 
+ * Before plan:
+ * select (funcA($$3))
+ *   ...
+ *     assign [$$3] <- [field-access($$0, 1)]
+ * 
+ * After plan:
+ * select (funcA(field-access($$0, 1)))
+ *   ...
+ *     assign [] <- []
+ */
+public class InlineSingleReferenceVariablesRule extends InlineVariablesRule {
+
+    // Maps from variable to a list of operators using that variable.
+    protected Map<LogicalVariable, List<ILogicalOperator>> usedVarsMap = new HashMap<LogicalVariable, List<ILogicalOperator>>();
+    protected List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
+    
+    @Override
+    protected void prepare(IOptimizationContext context) {
+        super.prepare(context);
+        usedVarsMap.clear();
+        usedVars.clear();
+    }
+    
+    @Override
+    protected boolean performFinalAction() throws AlgebricksException {
+        boolean modified = false;
+        for (Map.Entry<LogicalVariable, List<ILogicalOperator>> entry : usedVarsMap.entrySet()) {
+            // Perform replacement only if variable is referenced a single time.
+            if (entry.getValue().size() == 1) {
+                ILogicalOperator op = entry.getValue().get(0);
+                if (!op.requiresVariableReferenceExpressions()) {
+                    inlineVisitor.setOperator(op);
+                    inlineVisitor.setTargetVariable(entry.getKey());
+                    if (op.acceptExpressionTransform(inlineVisitor)) {
+                        modified = true;
+                    }
+                    inlineVisitor.setTargetVariable(null);
+                }         
+            }
+        }
+        return modified;
+    }
+
+    @Override
+    protected boolean performBottomUpAction(AbstractLogicalOperator op) throws AlgebricksException {
+        usedVars.clear();
+        VariableUtilities.getUsedVariables(op, usedVars);
+        for (LogicalVariable var : usedVars) {
+            List<ILogicalOperator> opsUsingVar = usedVarsMap.get(var);
+            if (opsUsingVar == null) {
+                opsUsingVar = new ArrayList<ILogicalOperator>();
+                usedVarsMap.put(var, opsUsingVar);
+            }
+            opsUsingVar.add(op);
+        }
+        return false;
+    }
+}
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java
index 8a79d81..7fed577a 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InlineVariablesRule.java
@@ -15,330 +15,231 @@
 package edu.uci.ics.hyracks.algebricks.rewriter.rules;
 
 import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.LinkedList;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 import org.apache.commons.lang3.mutable.Mutable;
 
 import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.EquivalenceClass;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
 import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
 import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
 import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
 import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
-import edu.uci.ics.hyracks.algebricks.core.config.AlgebricksConfig;
 import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
 
+/**
+ * Replaces variable reference expressions with their assigned function-call expression where applicable
+ * (some variables are generated by datasources).
+ * Inlining variables may enable other optimizations by allowing selects and assigns to be moved
+ * (e.g., a select may be pushed into a join to enable an efficient physical join operator).
+ * 
+ * Preconditions/Assumptions:
+ * Assumes no projects are in the plan. Only inlines variables whose assigned expression is a function call
+ * (i.e., this rule ignores right-hand-side constants and other variable reference expressions).
+ * 
+ * Postconditions/Examples:
+ * All qualifying variables have been inlined.
+ * 
+ * Example (simplified):
+ * 
+ * Before plan:
+ * select <- [$$1 < $$2 + $$0]
+ *   assign [$$2] <- [funcZ() + $$0]
+ *     assign [$$0, $$1] <- [funcX(), funcY()]
+ * 
+ * After plan:
+ * select <- [funcY() < funcZ() + funcX() + funcX()]
+ *   assign [$$2] <- [funcZ() + funcX()]
+ *     assign [$$0, $$1] <- [funcX(), funcY()]
+ */
 public class InlineVariablesRule implements IAlgebraicRewriteRule {
 
+    // Map of variables that could be replaced by their producing expression.
+    // Populated during the top-down sweep of the plan.
+    protected Map<LogicalVariable, ILogicalExpression> varAssignRhs = new HashMap<LogicalVariable, ILogicalExpression>();
+
+    // Visitor for replacing variable reference expressions with their originating expression.
+    protected InlineVariablesVisitor inlineVisitor = new InlineVariablesVisitor(varAssignRhs);
+    
+    // Set of FunctionIdentifiers that we should not inline.
+    protected Set<FunctionIdentifier> doNotInlineFuncs = new HashSet<FunctionIdentifier>();
+    
+    protected boolean hasRun = false;
+    
     @Override
     public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
         return false;
     }
 
     @Override
-    /**
-     * 
-     * Does one big DFS sweep over the plan.
-     * 
-     */
     public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        if (hasRun) {
+            return false;
+        }
         if (context.checkIfInDontApplySet(this, opRef.getValue())) {
             return false;
         }
-        VariableSubstitutionVisitor substVisitor = new VariableSubstitutionVisitor(false);
-        VariableSubstitutionVisitor substVisitorForWrites = new VariableSubstitutionVisitor(true);
-        substVisitor.setContext(context);
-        substVisitorForWrites.setContext(context);
-        Pair<Boolean, Boolean> bb = collectEqClassesAndRemoveRedundantOps(opRef, context, true,
-                new LinkedList<EquivalenceClass>(), substVisitor, substVisitorForWrites);
-        return bb.first;
+        prepare(context);
+        boolean modified = inlineVariables(opRef, context);
+        if (performFinalAction()) {
+            modified = true;
+        }
+        hasRun = true;
+        return modified;
+    }
+    
+    protected void prepare(IOptimizationContext context) {
+        varAssignRhs.clear();
+        inlineVisitor.setContext(context);
+    }
+    
+    protected boolean performBottomUpAction(AbstractLogicalOperator op) throws AlgebricksException {
+        // Only inline variables in operators that can deal with arbitrary expressions.
+        if (!op.requiresVariableReferenceExpressions()) {
+            inlineVisitor.setOperator(op);
+            return op.acceptExpressionTransform(inlineVisitor);
+        }
+        return false;
     }
 
-    private Pair<Boolean, Boolean> collectEqClassesAndRemoveRedundantOps(Mutable<ILogicalOperator> opRef,
-            IOptimizationContext context, boolean first, List<EquivalenceClass> equivClasses,
-            VariableSubstitutionVisitor substVisitor, VariableSubstitutionVisitor substVisitorForWrites)
+    protected boolean performFinalAction() throws AlgebricksException {
+        return false;
+    }
+    
+    protected boolean inlineVariables(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
             throws AlgebricksException {
         AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        // if (context.checkIfInDontApplySet(this, opRef.getValue())) {
-        // return false;
-        // }
-        if (op.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
-            return new Pair<Boolean, Boolean>(false, false);
-        }
-        boolean modified = false;
-        boolean ecChange = false;
-        int cnt = 0;
-        for (Mutable<ILogicalOperator> i : op.getInputs()) {
-            boolean isOuterInputBranch = op.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN && cnt == 1;
-            List<EquivalenceClass> eqc = isOuterInputBranch ? new LinkedList<EquivalenceClass>() : equivClasses;
-
-            Pair<Boolean, Boolean> bb = (collectEqClassesAndRemoveRedundantOps(i, context, false, eqc, substVisitor,
-                    substVisitorForWrites));
-
-            if (bb.first) {
-                modified = true;
-            }
-            if (bb.second) {
-                ecChange = true;
-            }
-
-            if (isOuterInputBranch) {
-                if (AlgebricksConfig.DEBUG) {
-                    AlgebricksConfig.ALGEBRICKS_LOGGER.finest("--- Equivalence classes for inner branch of outer op.: "
-                            + eqc + "\n");
-                }
-                for (EquivalenceClass ec : eqc) {
-                    if (!ec.representativeIsConst()) {
-                        equivClasses.add(ec);
-                    }
-                }
-            }
-
-            ++cnt;
-        }
-        if (op.hasNestedPlans()) {
-            AbstractOperatorWithNestedPlans n = (AbstractOperatorWithNestedPlans) op;
-            List<EquivalenceClass> eqc = equivClasses;
-            if (n.getOperatorTag() == LogicalOperatorTag.SUBPLAN) {
-                eqc = new LinkedList<EquivalenceClass>();
-            } else {
-                eqc = equivClasses;
-            }
-            for (ILogicalPlan p : n.getNestedPlans()) {
-                for (Mutable<ILogicalOperator> r : p.getRoots()) {
-                    Pair<Boolean, Boolean> bb = collectEqClassesAndRemoveRedundantOps(r, context, false, eqc,
-                            substVisitor, substVisitorForWrites);
-                    if (bb.first) {
-                        modified = true;
-                    }
-                    if (bb.second) {
-                        ecChange = true;
-                    }
-                }
-            }
-        }
-        // we assume a variable is assigned a value only once
+        
+        // Update mapping from variables to expressions during top-down traversal.
         if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-            AssignOperator a = (AssignOperator) op;
-            ILogicalExpression rhs = a.getExpressions().get(0).getValue();
-            if (rhs.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                LogicalVariable varLeft = a.getVariables().get(0);
-                VariableReferenceExpression varRef = (VariableReferenceExpression) rhs;
-                LogicalVariable varRight = varRef.getVariableReference();
-
-                EquivalenceClass ecRight = findEquivClass(varRight, equivClasses);
-                if (ecRight != null) {
-                    ecRight.addMember(varLeft);
-                } else {
-                    List<LogicalVariable> m = new LinkedList<LogicalVariable>();
-                    m.add(varRight);
-                    m.add(varLeft);
-                    EquivalenceClass ec = new EquivalenceClass(m, varRight);
-                    equivClasses.add(ec);
-                    if (AlgebricksConfig.DEBUG) {
-                        AlgebricksConfig.ALGEBRICKS_LOGGER.finest("--- New equivalence class: " + ec + "\n");
+            AssignOperator assignOp = (AssignOperator) op;
+            List<LogicalVariable> vars = assignOp.getVariables();
+            List<Mutable<ILogicalExpression>> exprs = assignOp.getExpressions();            
+            for (int i = 0; i < vars.size(); i++) {
+                ILogicalExpression expr = exprs.get(i).getValue();
+                // Ignore functions that are in the doNotInline set.                
+                if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                    AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+                    if (doNotInlineFuncs.contains(funcExpr.getFunctionIdentifier())) {
+                        continue;
                     }
                 }
-                ecChange = true;
-            } else if (((AbstractLogicalExpression) rhs).getExpressionTag() == LogicalExpressionTag.CONSTANT) {
-                LogicalVariable varLeft = a.getVariables().get(0);
-                List<LogicalVariable> m = new LinkedList<LogicalVariable>();
-                m.add(varLeft);
-                EquivalenceClass ec = new EquivalenceClass(m, (ConstantExpression) rhs);
-                // equivClassesForParent.add(ec);
-                equivClasses.add(ec);
-                ecChange = true;
-            }
-        } else if (op.getOperatorTag() == LogicalOperatorTag.GROUP && !(context.checkIfInDontApplySet(this, op))) {
-            GroupByOperator group = (GroupByOperator) op;
-            Pair<Boolean, Boolean> r1 = processVarExprPairs(group.getGroupByList(), equivClasses);
-            Pair<Boolean, Boolean> r2 = processVarExprPairs(group.getDecorList(), equivClasses);
-            modified = modified || r1.first || r2.first;
-            ecChange = r1.second || r2.second;
-        }
-        if (op.getOperatorTag() == LogicalOperatorTag.PROJECT) {
-            assignVarsNeededByProject((ProjectOperator) op, equivClasses, context);
-        } else {
-            if (op.getOperatorTag() == LogicalOperatorTag.WRITE) {
-                substVisitorForWrites.setEquivalenceClasses(equivClasses);
-                if (op.acceptExpressionTransform(substVisitorForWrites)) {
-                    modified = true;
-                }
-            } else {
-                substVisitor.setEquivalenceClasses(equivClasses);
-                if (op.acceptExpressionTransform(substVisitor)) {
-                    modified = true;
-                    if (op.getOperatorTag() == LogicalOperatorTag.GROUP) {
-                        GroupByOperator group = (GroupByOperator) op;
-                        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> gp : group.getGroupByList()) {
-                            if (gp.first != null
-                                    && gp.second.getValue().getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                                LogicalVariable gv = ((VariableReferenceExpression) gp.second.getValue())
-                                        .getVariableReference();
-                                Iterator<Pair<LogicalVariable, Mutable<ILogicalExpression>>> iter = group
-                                        .getDecorList().iterator();
-                                while (iter.hasNext()) {
-                                    Pair<LogicalVariable, Mutable<ILogicalExpression>> dp = iter.next();
-                                    if (dp.first == null
-                                            && dp.second.getValue().getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                                        LogicalVariable dv = ((VariableReferenceExpression) dp.second.getValue())
-                                                .getVariableReference();
-                                        if (dv == gv) {
-                                            // The decor variable is redundant,
-                                            // since it is
-                                            // propagated as a grouping
-                                            // variable.
-                                            EquivalenceClass ec1 = findEquivClass(gv, equivClasses);
-                                            if (ec1 != null) {
-                                                ec1.addMember(gp.first);
-                                                ec1.setVariableRepresentative(gp.first);
-                                            } else {
-                                                List<LogicalVariable> varList = new ArrayList<LogicalVariable>();
-                                                varList.add(gp.first);
-                                                varList.add(gv);
-                                                ec1 = new EquivalenceClass(varList, gp.first);
-                                            }
-                                            iter.remove();
-                                            break;
-                                        }
-                                    }
-                                }
-                            }
-                        }
-                    }
-                }
+                varAssignRhs.put(vars.get(i), exprs.get(i).getValue());
             }
         }
-        return new Pair<Boolean, Boolean>(modified, ecChange);
-    }
 
-    private Pair<Boolean, Boolean> processVarExprPairs(
-            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> vePairs, List<EquivalenceClass> equivClasses) {
-        boolean ecFromGroup = false;
+        // Descend into children removing projects on the way.
         boolean modified = false;
-        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : vePairs) {
-            ILogicalExpression expr = p.second.getValue();
-            if (p.first != null && expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                VariableReferenceExpression varRef = (VariableReferenceExpression) expr;
-                LogicalVariable rhsVar = varRef.getVariableReference();
-                ecFromGroup = true;
-                EquivalenceClass ecRight = findEquivClass(rhsVar, equivClasses);
-                if (ecRight != null) {
-                    LogicalVariable replacingVar = ecRight.getVariableRepresentative();
-                    if (replacingVar != null && replacingVar != rhsVar) {
-                        varRef.setVariable(replacingVar);
-                        modified = true;
-                    }
-                }
-            }
+        for (Mutable<ILogicalOperator> inputOpRef : op.getInputs()) {
+            if (inlineVariables(inputOpRef, context)) {
+                modified = true;
+            }            
         }
-        return new Pair<Boolean, Boolean>(modified, ecFromGroup);
+
+        if (performBottomUpAction(op)) {
+            modified = true;
+        }
+        
+        if (modified) {
+            context.computeAndSetTypeEnvironmentForOperator(op);
+            context.addToDontApplySet(this, op);
+            // Re-enable rules that we may have already tried. They could be applicable now after inlining.
+            context.removeFromAlreadyCompared(opRef.getValue());
+        }
+
+        return modified;
     }
 
-    // Instead of doing this, we could make Projection to be more expressive and
-    // also take constants (or even expression), at the expense of a more
-    // complex project push down.
-    private void assignVarsNeededByProject(ProjectOperator op, List<EquivalenceClass> equivClasses,
-            IOptimizationContext context) throws AlgebricksException {
-        List<LogicalVariable> prVars = op.getVariables();
-        int sz = prVars.size();
-        for (int i = 0; i < sz; i++) {
-            EquivalenceClass ec = findEquivClass(prVars.get(i), equivClasses);
-            if (ec != null) {
-                if (!ec.representativeIsConst()) {
-                    prVars.set(i, ec.getVariableRepresentative());
-                }
-            }
-        }
-    }
-
-    private final static EquivalenceClass findEquivClass(LogicalVariable var, List<EquivalenceClass> equivClasses) {
-        for (EquivalenceClass ec : equivClasses) {
-            if (ec.contains(var)) {
-                return ec;
-            }
-        }
-        return null;
-    }
-
-    private class VariableSubstitutionVisitor implements ILogicalExpressionReferenceTransform {
-        private List<EquivalenceClass> equivClasses;
+    protected class InlineVariablesVisitor implements ILogicalExpressionReferenceTransform {
+        
+        private final Map<LogicalVariable, ILogicalExpression> varAssignRhs;
+        private final Set<LogicalVariable> liveVars = new HashSet<LogicalVariable>();
+        private final List<LogicalVariable> rhsUsedVars = new ArrayList<LogicalVariable>();        
+        private ILogicalOperator op;
         private IOptimizationContext context;
-        private final boolean doNotSubstWithConst;
-
-        public VariableSubstitutionVisitor(boolean doNotSubstWithConst) {
-            this.doNotSubstWithConst = doNotSubstWithConst;
+        // If set, only replace this variable reference.
+        private LogicalVariable targetVar;
+        
+        public InlineVariablesVisitor(Map<LogicalVariable, ILogicalExpression> varAssignRhs) {
+            this.varAssignRhs = varAssignRhs;
         }
-
+        
+        public void setTargetVariable(LogicalVariable targetVar) {
+            this.targetVar = targetVar;
+        }
+        
         public void setContext(IOptimizationContext context) {
             this.context = context;
         }
 
-        public void setEquivalenceClasses(List<EquivalenceClass> equivClasses) {
-            this.equivClasses = equivClasses;
+        public void setOperator(ILogicalOperator op) throws AlgebricksException {
+            this.op = op;
+            liveVars.clear();
         }
-
+        
         @Override
-        public boolean transform(Mutable<ILogicalExpression> exprRef) {
+        public boolean transform(Mutable<ILogicalExpression> exprRef) throws AlgebricksException {            
             ILogicalExpression e = exprRef.getValue();
             switch (((AbstractLogicalExpression) e).getExpressionTag()) {
                 case VARIABLE: {
-                    // look for a required substitution
                     LogicalVariable var = ((VariableReferenceExpression) e).getVariableReference();
+                    // Restrict replacement to targetVar if it has been set.
+                    if (targetVar != null && var != targetVar) {
+                        return false;
+                    }
+                    // Make sure the variable has not been excluded from inlining.
                     if (context.shouldNotBeInlined(var)) {
                         return false;
                     }
-                    EquivalenceClass ec = findEquivClass(var, equivClasses);
-                    if (ec == null) {
+                    ILogicalExpression rhs = varAssignRhs.get(var);
+                    if (rhs == null) {
+                        // Variable was not produced by an assign.
                         return false;
                     }
-                    if (ec.representativeIsConst()) {
-                        if (doNotSubstWithConst) {
-                            return false;
-                        }
-                        exprRef.setValue(ec.getConstRepresentative());
-                        return true;
-                    } else {
-                        LogicalVariable r = ec.getVariableRepresentative();
-                        if (!r.equals(var)) {
-                            exprRef.setValue(new VariableReferenceExpression(r));
-                            return true;
-                        } else {
+                    
+                    // Make sure used variables from rhs are live.
+                    if (liveVars.isEmpty()) {
+                        VariableUtilities.getLiveVariables(op, liveVars);
+                    }
+                    rhsUsedVars.clear();
+                    rhs.getUsedVariables(rhsUsedVars);
+                    for (LogicalVariable rhsUsedVar : rhsUsedVars) {
+                        if (!liveVars.contains(rhsUsedVar)) {
                             return false;
                         }
                     }
+                    
+                    // Replace variable reference with a clone of the rhs expr.
+                    exprRef.setValue(rhs.cloneExpression());
+                    return true;
                 }
                 case FUNCTION_CALL: {
                     AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) e;
-                    boolean m = false;
+                    boolean modified = false;
                     for (Mutable<ILogicalExpression> arg : fce.getArguments()) {
                         if (transform(arg)) {
-                            m = true;
+                            modified = true;
                         }
                     }
-                    return m;
+                    return modified;
                 }
                 default: {
                     return false;
                 }
             }
         }
-
     }
 }
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InsertProjectBeforeUnionRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InsertProjectBeforeUnionRule.java
index d54833e..431fca1 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InsertProjectBeforeUnionRule.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/InsertProjectBeforeUnionRule.java
@@ -87,6 +87,7 @@
         projectOp.getInputs().add(new MutableObject<ILogicalOperator>(parentOp));
         opUnion.getInputs().get(branch).setValue(projectOp);
         projectOp.setPhysicalOperator(new StreamProjectPOperator());
+        context.computeAndSetTypeEnvironmentForOperator(projectOp);
         context.computeAndSetTypeEnvironmentForOperator(parentOp);
     }
 
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByCombinerRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByCombinerRule.java
index 5c5fdb1..a8864fe 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByCombinerRule.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByCombinerRule.java
@@ -38,7 +38,6 @@
         if (context.checkIfInDontApplySet(this, op)) {
             return false;
         }
-        context.addToDontApplySet(this, op);
         if (op.getOperatorTag() != LogicalOperatorTag.GROUP) {
             return false;
         }
@@ -86,6 +85,7 @@
         opRef3.setValue(newGbyOp);
         typeGby(newGbyOp, context);
         typeGby(gbyOp, context);
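+        // Mark the operator as visited only after the rewrite has succeeded,
+        // so that earlier bail-outs do not prevent the rule from firing later.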
+        context.addToDontApplySet(this, op);
         return true;
     }
 
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceProjectsRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceProjectsRule.java
new file mode 100644
index 0000000..a057f4f
--- /dev/null
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceProjectsRule.java
@@ -0,0 +1,171 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.rewriter.rules;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Projects away unused variables at the earliest possible point.
+ * Does a full DFS sweep of the plan, adding ProjectOperators in the bottom-up pass.
+ * Also, removes projects that have become useless.
+ * TODO: This rule 'recklessly' adds as many projects as possible, but there is no guarantee
+ * that the overall cost of the plan is reduced since project operators also add a cost.
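+ *
+ * Illustrative example (the variables and operators below are hypothetical):
+ *
+ * Before plan:
+ * select (func($$1))
+ *   op where $$1, $$2 and $$3 are live
+ *     ...
+ *
+ * After plan:
+ * select (func($$1))
+ *   project [$$1]
+ *     op where $$1, $$2 and $$3 are live
+ *       ...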
+ */
+public class IntroduceProjectsRule implements IAlgebraicRewriteRule {
+
+    private final Set<LogicalVariable> usedVars = new HashSet<LogicalVariable>();
+    private final Set<LogicalVariable> liveVars = new HashSet<LogicalVariable>();
+    private final Set<LogicalVariable> producedVars = new HashSet<LogicalVariable>();
+    private final List<LogicalVariable> projectVars = new ArrayList<LogicalVariable>();
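+    // The rule drives its own full-plan sweep from the root, so it fires at most once.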
+    protected boolean hasRun = false;
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        if (hasRun) {
+            return false;
+        }
+        hasRun = true;
+        return introduceProjects(null, -1, opRef, Collections.<LogicalVariable> emptySet(), context);
+    }
+
+    protected boolean introduceProjects(AbstractLogicalOperator parentOp, int parentInputIndex,
+            Mutable<ILogicalOperator> opRef, Set<LogicalVariable> parentUsedVars, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        boolean modified = false;
+
+        usedVars.clear();
+        VariableUtilities.getUsedVariables(op, usedVars);
+
+        // In the top-down pass, maintain a set of variables that are used in op and all its parents.
+        HashSet<LogicalVariable> parentsUsedVars = new HashSet<LogicalVariable>();
+        parentsUsedVars.addAll(parentUsedVars);
+        parentsUsedVars.addAll(usedVars);
+
+        // Descend into children.
+        for (int i = 0; i < op.getInputs().size(); i++) {
+            Mutable<ILogicalOperator> inputOpRef = op.getInputs().get(i);
+            if (introduceProjects(op, i, inputOpRef, parentsUsedVars, context)) {
+                modified = true;
+            }
+        }
+
+        if (modified) {
+            context.computeAndSetTypeEnvironmentForOperator(op);
+        }
+        // In the bottom-up pass, determine which live variables are not used by op's parents.
+        // Such variables can be projected away.
+        liveVars.clear();
+        VariableUtilities.getLiveVariables(op, liveVars);
+        producedVars.clear();
+        VariableUtilities.getProducedVariables(op, producedVars);
+        liveVars.removeAll(producedVars);
+
+        projectVars.clear();
+        for (LogicalVariable liveVar : liveVars) {
+            if (parentsUsedVars.contains(liveVar)) {
+                projectVars.add(liveVar);
+            }
+        }
+
+        // Some of the variables that are live at this op are not used above.
+        if (projectVars.size() != liveVars.size()) {
+            // Add a project operator under each of op's qualifying input branches.
+            for (int i = 0; i < op.getInputs().size(); i++) {
+                ILogicalOperator childOp = op.getInputs().get(i).getValue();
+                liveVars.clear();
+                VariableUtilities.getLiveVariables(childOp, liveVars);
+                List<LogicalVariable> vars = new ArrayList<LogicalVariable>();
+                vars.addAll(projectVars);
+                // Only retain those variables that are live in the i-th input branch.
+                vars.retainAll(liveVars);
+                if (vars.size() != liveVars.size()) {
+                    ProjectOperator projectOp = new ProjectOperator(vars);
+                    projectOp.getInputs().add(new MutableObject<ILogicalOperator>(childOp));
+                    op.getInputs().get(i).setValue(projectOp);
+                    context.computeAndSetTypeEnvironmentForOperator(projectOp);
+                    modified = true;
+                }
+            }
+        } else if (op.getOperatorTag() == LogicalOperatorTag.PROJECT) {
+            // Check if the existing project has become useless.
+            liveVars.clear();
+            VariableUtilities.getLiveVariables(op.getInputs().get(0).getValue(), liveVars);
+            ProjectOperator projectOp = (ProjectOperator) op;
+            List<LogicalVariable> projectVars = projectOp.getVariables();
+            if (liveVars.size() == projectVars.size() && liveVars.containsAll(projectVars)) {
+                boolean eliminateProject = true;
+                // For UnionAll the variables must also be in exactly the correct order.
+                if (parentOp.getOperatorTag() == LogicalOperatorTag.UNIONALL) {
+                    eliminateProject = canEliminateProjectBelowUnion((UnionAllOperator) parentOp, projectOp,
+                            parentInputIndex);
+                }
+                if (eliminateProject) {
+                    // The existing project has become useless. Remove it.
+                    parentOp.getInputs().get(parentInputIndex).setValue(op.getInputs().get(0).getValue());
+                }
+            }
+        }
+
+        if (modified) {
+            context.computeAndSetTypeEnvironmentForOperator(op);
+        }
+        return modified;
+    }
+
+    private boolean canEliminateProjectBelowUnion(UnionAllOperator unionOp, ProjectOperator projectOp,
+            int unionInputIndex) throws AlgebricksException {
+        List<LogicalVariable> orderedLiveVars = new ArrayList<LogicalVariable>();
+        VariableUtilities.getLiveVariables(projectOp.getInputs().get(0).getValue(), orderedLiveVars);
+        int numVars = orderedLiveVars.size();
+        for (int i = 0; i < numVars; i++) {
+            Triple<LogicalVariable, LogicalVariable, LogicalVariable> varTriple = unionOp.getVariableMappings().get(i);
+            if (unionInputIndex == 0) {
+                if (varTriple.first != orderedLiveVars.get(i)) {
+                    return false;
+                }
+            } else {
+                if (varTriple.second != orderedLiveVars.get(i)) {
+                    return false;
+                }
+            }
+        }
+        return true;
+    }
+}
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java
index 75862cf..8b4f0a1 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java
@@ -38,9 +38,6 @@
 
 public class PullSelectOutOfEqJoin implements IAlgebraicRewriteRule {
 
-    private List<Mutable<ILogicalExpression>> eqVarVarComps = new ArrayList<Mutable<ILogicalExpression>>();
-    private List<Mutable<ILogicalExpression>> otherPredicates = new ArrayList<Mutable<ILogicalExpression>>();
-
     @Override
     public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
         return false;
@@ -66,8 +63,8 @@
         if (!fi.equals(AlgebricksBuiltinFunctions.AND)) {
             return false;
         }
-        eqVarVarComps.clear();
-        otherPredicates.clear();
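+        // Allocate fresh lists per invocation rather than reusing instance-level state.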
+        List<Mutable<ILogicalExpression>> eqVarVarComps = new ArrayList<Mutable<ILogicalExpression>>();
+        List<Mutable<ILogicalExpression>> otherPredicates = new ArrayList<Mutable<ILogicalExpression>>();
         for (Mutable<ILogicalExpression> arg : fexp.getArguments()) {
             if (isEqVarVar(arg.getValue())) {
                 eqVarVarComps.add(arg);
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushAssignBelowUnionAllRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushAssignBelowUnionAllRule.java
new file mode 100644
index 0000000..1bcf95a
--- /dev/null
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushAssignBelowUnionAllRule.java
@@ -0,0 +1,149 @@
+package edu.uci.ics.hyracks.algebricks.rewriter.rules;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Pushes an AssignOperator below a UnionAll operator by creating a new AssignOperator below each of
+ * the UnionAllOperator's branches, with appropriate variable replacements.
+ * This rule can help enable other rules that are difficult to fire across a UnionAllOperator,
+ * for example, eliminating common sub-expressions.
+ * 
+ * Example:
+ * 
+ * Before plan:
+ * ...
+ * assign [$$20, $$21] <- [funcA($$3), funcB($$6)]
+ *   union ($$1, $$2, $$3) ($$4, $$5, $$6)
+ *     union_branch_0
+ *       ...
+ *     union_branch_1
+ *       ...
+ *     
+ * After plan:
+ * ...
+ * union ($$1, $$2, $$3) ($$4, $$5, $$6) ($$22, $$24, $$20) ($$23, $$25, $$21)
+ *   assign [$$22, $$23] <- [funcA($$1), funcB($$4)]
+ *     union_branch_0
+ *       ...
+ *   assign [$$24, $$25] <- [funcA($$2), funcB($$5)]
+ *     union_branch_1
+ *       ...
+ */
+public class PushAssignBelowUnionAllRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (!op.hasInputs()) {
+            return false;
+        }
+
+        boolean modified = false;
+        for (int i = 0; i < op.getInputs().size(); i++) {
+            AbstractLogicalOperator childOp = (AbstractLogicalOperator) op.getInputs().get(i).getValue();
+            if (childOp.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
+                continue;
+            }
+            AssignOperator assignOp = (AssignOperator) childOp;
+
+            AbstractLogicalOperator childOfChildOp = (AbstractLogicalOperator) assignOp.getInputs().get(0).getValue();
+            if (childOfChildOp.getOperatorTag() != LogicalOperatorTag.UNIONALL) {
+                continue;
+            }
+            UnionAllOperator unionOp = (UnionAllOperator) childOfChildOp;
+
+            Set<LogicalVariable> assignUsedVars = new HashSet<LogicalVariable>();
+            VariableUtilities.getUsedVariables(assignOp, assignUsedVars);
+
+            List<LogicalVariable> assignVars = assignOp.getVariables();
+
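+            // UnionAllOperator is binary (its variable mappings are triples of
+            // left-branch variable, right-branch variable, output variable),
+            // so exactly two new assigns are created, one per branch.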
+            AssignOperator[] newAssignOps = new AssignOperator[2];
+            for (int j = 0; j < unionOp.getInputs().size(); j++) {
+                newAssignOps[j] = createAssignBelowUnionAllBranch(unionOp, j, assignOp, assignUsedVars, context);
+            }
+            // Add original assign variables to the union variable mappings.
+            for (int j = 0; j < assignVars.size(); j++) {
+                LogicalVariable first = newAssignOps[0].getVariables().get(j);
+                LogicalVariable second = newAssignOps[1].getVariables().get(j);
+                Triple<LogicalVariable, LogicalVariable, LogicalVariable> varMapping = new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(
+                        first, second, assignVars.get(j));
+                unionOp.getVariableMappings().add(varMapping);
+            }
+            context.computeAndSetTypeEnvironmentForOperator(unionOp);
+
+            // Remove original assign operator.
+            op.getInputs().set(i, assignOp.getInputs().get(0));
+            context.computeAndSetTypeEnvironmentForOperator(op);
+            modified = true;
+        }
+
+        return modified;
+    }
+
+    private AssignOperator createAssignBelowUnionAllBranch(UnionAllOperator unionOp, int inputIndex,
+            AssignOperator originalAssignOp, Set<LogicalVariable> assignUsedVars, IOptimizationContext context)
+            throws AlgebricksException {
+        AssignOperator newAssignOp = cloneAssignOperator(originalAssignOp, context);
+        newAssignOp.getInputs()
+                .add(new MutableObject<ILogicalOperator>(unionOp.getInputs().get(inputIndex).getValue()));
+        context.computeAndSetTypeEnvironmentForOperator(newAssignOp);
+        unionOp.getInputs().get(inputIndex).setValue(newAssignOp);
+        int numVarMappings = unionOp.getVariableMappings().size();
+        for (int i = 0; i < numVarMappings; i++) {
+            Triple<LogicalVariable, LogicalVariable, LogicalVariable> varMapping = unionOp.getVariableMappings().get(i);
+            if (assignUsedVars.contains(varMapping.third)) {
+                LogicalVariable replacementVar;
+                if (inputIndex == 0) {
+                    replacementVar = varMapping.first;
+                } else {
+                    replacementVar = varMapping.second;
+                }
+                VariableUtilities.substituteVariables(newAssignOp, varMapping.third, replacementVar, context);
+            }
+        }
+        return newAssignOp;
+    }
+
+    /**
+     * Clones the given assign operator, changing the assigned variables to new ones.
+     * Also, leaves the inputs of the clone empty.
+     */
+    private AssignOperator cloneAssignOperator(AssignOperator assignOp, IOptimizationContext context) {
+        List<LogicalVariable> vars = new ArrayList<LogicalVariable>();
+        List<Mutable<ILogicalExpression>> exprs = new ArrayList<Mutable<ILogicalExpression>>();
+        int numVars = assignOp.getVariables().size();
+        for (int i = 0; i < numVars; i++) {
+            vars.add(context.newVar());
+            exprs.add(new MutableObject<ILogicalExpression>(assignOp.getExpressions().get(i).getValue()
+                    .cloneExpression()));
+        }
+        AssignOperator assignCloneOp = new AssignOperator(vars, exprs);
+        assignCloneOp.setExecutionMode(assignOp.getExecutionMode());
+        return assignCloneOp;
+    }
+}
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushFunctionsBelowJoin.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushFunctionsBelowJoin.java
new file mode 100644
index 0000000..16b010e
--- /dev/null
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushFunctionsBelowJoin.java
@@ -0,0 +1,208 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.algebricks.rewriter.rules;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Pushes function-call expressions below a join if possible.
+ * Assigns the result of such function-call expressions to new variables, and replaces the original
+ * expression with a corresponding variable reference expression.
+ * This rule can help reduce the cost of computing expensive functions by pushing them below
+ * a join (which may blow up the cardinality).
+ * Also, this rule may help to enable other rules such as common subexpression elimination, again to reduce
+ * the number of calls to expensive functions.
+ * 
+ * Example: (we are pushing pushMeFunc)
+ * 
+ * Before plan:
+ * assign [$$10] <- [funcA(funcB(pushMeFunc($$3, $$4)))]
+ *   join (some condition) 
+ *     join_branch_0 where $$3 and $$4 are not live
+ *       ...
+ *     join_branch_1 where $$3 and $$4 are live
+ *       ...
+ * 
+ * After plan:
+ * assign [$$10] <- [funcA(funcB($$11))]
+ *   join (some condition) 
+ *     join_branch_0 where $$3 and $$4 are not live
+ *       ...
+ *     join_branch_1 where $$3 and $$4 are live
+ *       assign[$$11] <- [pushMeFunc($$3, $$4)]
+ *         ...
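+ *
+ * Usage sketch (PUSH_ME_FUNC and the surrounding rule list are hypothetical):
+ *
+ * Set<FunctionIdentifier> funcIdents = new HashSet<FunctionIdentifier>();
+ * funcIdents.add(PUSH_ME_FUNC);
+ * rewriteRuleSet.add(new PushFunctionsBelowJoin(funcIdents));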
+ */
+public class PushFunctionsBelowJoin implements IAlgebraicRewriteRule {
+
+    private final Set<FunctionIdentifier> toPushFuncIdents;
+    private final List<Mutable<ILogicalExpression>> funcExprs = new ArrayList<Mutable<ILogicalExpression>>();
+    private final List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
+    private final List<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
+
+    public PushFunctionsBelowJoin(Set<FunctionIdentifier> toPushFuncIdents) {
+        this.toPushFuncIdents = toPushFuncIdents;
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
+            return false;
+        }
+        AssignOperator assignOp = (AssignOperator) op;
+
+        // Find a join operator below this assign.
+        Mutable<ILogicalOperator> joinOpRef = findJoinOp(assignOp.getInputs().get(0));
+        if (joinOpRef == null) {
+            return false;
+        }
+        AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) joinOpRef.getValue();
+
+        // Check if the assign uses a function that we wish to push below the join if possible.
+        funcExprs.clear();
+        gatherFunctionCalls(assignOp, funcExprs);
+        if (funcExprs.isEmpty()) {
+            return false;
+        }
+
+        // Try to push the functions down the input branches of the join.
+        boolean modified = false;
+        if (pushDownFunctions(joinOp, 0, funcExprs, context)) {
+            modified = true;
+        }
+        if (pushDownFunctions(joinOp, 1, funcExprs, context)) {
+            modified = true;
+        }
+        if (modified) {
+            context.computeAndSetTypeEnvironmentForOperator(joinOp);
+        }
+        return modified;
+    }
+
+    private Mutable<ILogicalOperator> findJoinOp(Mutable<ILogicalOperator> opRef) {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        switch (op.getOperatorTag()) {
+            case INNERJOIN:
+            case LEFTOUTERJOIN: {
+                return opRef;
+            }
+            // Bail on these operators.
+            case GROUP:
+            case AGGREGATE:
+            case DISTINCT:
+            case UNNEST_MAP: {
+                return null;
+            }
+            // Traverse children.
+            default: {
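+                // Note: the loop returns on its first iteration, so only the
+                // first input of each traversed operator is followed.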
+                for (Mutable<ILogicalOperator> childOpRef : op.getInputs()) {
+                    return findJoinOp(childOpRef);
+                }
+            }
+        }
+        return null;
+    }
+
+    private void gatherFunctionCalls(AssignOperator assignOp, List<Mutable<ILogicalExpression>> funcExprs) {
+        for (Mutable<ILogicalExpression> exprRef : assignOp.getExpressions()) {
+            gatherFunctionCalls(exprRef, funcExprs);
+        }
+    }
+
+    private void gatherFunctionCalls(Mutable<ILogicalExpression> exprRef, List<Mutable<ILogicalExpression>> funcExprs) {
+        AbstractLogicalExpression expr = (AbstractLogicalExpression) exprRef.getValue();
+        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return;
+        }
+        // Check whether the function is a function we want to push.
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+        if (toPushFuncIdents.contains(funcExpr.getFunctionIdentifier())) {
+            funcExprs.add(exprRef);
+        }
+        // Traverse arguments.
+        for (Mutable<ILogicalExpression> funcArg : funcExpr.getArguments()) {
+            gatherFunctionCalls(funcArg, funcExprs);
+        }
+    }
+
+    private boolean pushDownFunctions(AbstractBinaryJoinOperator joinOp, int inputIndex,
+            List<Mutable<ILogicalExpression>> funcExprs, IOptimizationContext context) throws AlgebricksException {
+        ILogicalOperator joinInputOp = joinOp.getInputs().get(inputIndex).getValue();
+        liveVars.clear();
+        VariableUtilities.getLiveVariables(joinInputOp, liveVars);
+        Iterator<Mutable<ILogicalExpression>> funcIter = funcExprs.iterator();
+        List<LogicalVariable> assignVars = null;
+        List<Mutable<ILogicalExpression>> assignExprs = null;
+        while (funcIter.hasNext()) {
+            Mutable<ILogicalExpression> funcExprRef = funcIter.next();
+            ILogicalExpression funcExpr = funcExprRef.getValue();
+            usedVars.clear();
+            funcExpr.getUsedVariables(usedVars);
+            // Check if we can push the function down this branch.
+            if (liveVars.containsAll(usedVars)) {
+                if (assignVars == null) {
+                    assignVars = new ArrayList<LogicalVariable>();
+                    assignExprs = new ArrayList<Mutable<ILogicalExpression>>();
+                }
+                // Replace the original expression with a variable reference expression.
+                LogicalVariable replacementVar = context.newVar();
+                assignVars.add(replacementVar);
+                assignExprs.add(new MutableObject<ILogicalExpression>(funcExpr));
+                funcExprRef.setValue(new VariableReferenceExpression(replacementVar));
+                funcIter.remove();
+            }
+        }
+        // Create new assign operator below the join if any functions can be pushed.
+        if (assignVars != null) {
+            AssignOperator newAssign = new AssignOperator(assignVars, assignExprs);
+            newAssign.getInputs().add(new MutableObject<ILogicalOperator>(joinInputOp));
+            newAssign.setExecutionMode(joinOp.getExecutionMode());
+            joinOp.getInputs().get(inputIndex).setValue(newAssign);
+            context.computeAndSetTypeEnvironmentForOperator(newAssign);
+            return true;
+        }
+        return false;
+    }
+}
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSelectIntoJoinRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSelectIntoJoinRule.java
index 8c679c5..99a6b8c 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSelectIntoJoinRule.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushSelectIntoJoinRule.java
@@ -143,11 +143,11 @@
         if (!intersectsBranch[0] && !intersectsBranch[1]) {
             return false;
         }
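+        // Push the collected operators into the join branches up front,
+        // independently of where the select condition is placed below.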
+        if (needToPushOps) {
+            pushOps(pushedOnLeft, joinBranchLeftRef, context);
+            pushOps(pushedOnRight, joinBranchRightRef, context);
+        }
         if (intersectsAllBranches) {
-            if (needToPushOps) {
-                pushOps(pushedOnLeft, joinBranchLeftRef, context);
-                pushOps(pushedOnRight, joinBranchRightRef, context);
-            }
             addCondToJoin(select, join, context);
         } else { // push down
             Iterator<Mutable<ILogicalOperator>> branchIter = join.getInputs().iterator();
@@ -156,13 +156,6 @@
                 Mutable<ILogicalOperator> branch = branchIter.next();
                 boolean inter = intersectsBranch[j];
                 if (inter) {
-                    if (needToPushOps) {
-                        if (j == 0) {
-                            pushOps(pushedOnLeft, joinBranchLeftRef, context);
-                        } else {
-                            pushOps(pushedOnRight, joinBranchRightRef, context);
-                        }
-                    }
                     copySelectToBranch(select, branch, context);
                 }
 
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantGroupByDecorVars.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantGroupByDecorVars.java
new file mode 100644
index 0000000..1106898
--- /dev/null
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantGroupByDecorVars.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.rewriter.rules;
+
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Removes duplicate variables from a group-by operator's decor list.
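+ * For example (with hypothetical variables), a decor list referencing
+ * [$$2, $$3, $$2] is rewritten to reference only [$$2, $$3].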
+ */
+public class RemoveRedundantGroupByDecorVars implements IAlgebraicRewriteRule {
+
+    private final Set<LogicalVariable> vars = new HashSet<LogicalVariable>();
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.GROUP) {
+            return false;
+        }
+        if (context.checkIfInDontApplySet(this, op)) {
+            return false;
+        }
+        vars.clear();
+
+        boolean modified = false;
+        GroupByOperator groupOp = (GroupByOperator) op;
+        Iterator<Pair<LogicalVariable, Mutable<ILogicalExpression>>> iter = groupOp.getDecorList().iterator();
+        while (iter.hasNext()) {
+            Pair<LogicalVariable, Mutable<ILogicalExpression>> decor = iter.next();
+            if (decor.first != null || decor.second.getValue().getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+                continue;
+            }
+            VariableReferenceExpression varRefExpr = (VariableReferenceExpression) decor.second.getValue();
+            LogicalVariable var = varRefExpr.getVariableReference();
+            if (vars.contains(var)) {
+                iter.remove();
+                modified = true;
+            } else {
+                vars.add(var);
+            }
+        }
+        if (modified) {
+            context.addToDontApplySet(this, op);
+        }
+        return modified;
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+}
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java
new file mode 100644
index 0000000..ec57be5
--- /dev/null
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java
@@ -0,0 +1,291 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.rewriter.rules;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Replaces redundant variable references with their bottom-most equivalent representative.
+ * Does a DFS sweep over the plan keeping track of variable equivalence classes.
+ * For example, this rule would perform the following rewrite.
+ * 
+ * Before Plan:
+ * select (function-call: func, Args:[%0->$$11])
+ *   project [$$11]
+ *     assign [$$11] <- [$$10]
+ *       assign [$$10] <- [$$9]
+ *         assign [$$9] <- ...
+ *           ...
+ *           
+ * After Plan:
+ * select (function-call: func, Args:[%0->$$9])
+ *   project [$$9]
+ *     assign [$$11] <- [$$9]
+ *       assign [$$10] <- [$$9]
+ *         assign [$$9] <- ...
+ *           ...
+ */
+public class RemoveRedundantVariablesRule implements IAlgebraicRewriteRule {
+
+    private final VariableSubstitutionVisitor substVisitor = new VariableSubstitutionVisitor();
+    private final Map<LogicalVariable, List<LogicalVariable>> equivalentVarsMap = new HashMap<LogicalVariable, List<LogicalVariable>>();
+
+    protected boolean hasRun = false;
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        if (context.checkIfInDontApplySet(this, opRef.getValue())) {
+            return false;
+        }
+        equivalentVarsMap.clear();
+        boolean modified = removeRedundantVariables(opRef, context);
+        if (modified) {
+            context.computeAndSetTypeEnvironmentForOperator(opRef.getValue());
+        }
+        return modified;
+    }
+
+    private void updateEquivalenceClassMap(LogicalVariable lhs, LogicalVariable rhs) {
+        List<LogicalVariable> equivalentVars = equivalentVarsMap.get(rhs);
+        if (equivalentVars == null) {
+            equivalentVars = new ArrayList<LogicalVariable>();
+            // The first element in the list is the bottom-most representative which will replace all equivalent vars.
+            equivalentVars.add(rhs);
+            equivalentVars.add(lhs);
+            equivalentVarsMap.put(rhs, equivalentVars);
+        }
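+        // Map lhs to the same list so both variables resolve to a single representative.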
+        equivalentVarsMap.put(lhs, equivalentVars);
+    }
+
+    private boolean removeRedundantVariables(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        boolean modified = false;
+
+        // Recurse into children.
+        for (Mutable<ILogicalOperator> inputOpRef : op.getInputs()) {
+            if (removeRedundantVariables(inputOpRef, context)) {
+                modified = true;
+            }
+        }
+
+        // Update equivalence class map.
+        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+            AssignOperator assignOp = (AssignOperator) op;
+            int numVars = assignOp.getVariables().size();
+            for (int i = 0; i < numVars; i++) {
+                ILogicalExpression expr = assignOp.getExpressions().get(i).getValue();
+                if (expr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+                    continue;
+                }
+                VariableReferenceExpression rhsVarRefExpr = (VariableReferenceExpression) expr;
+                // Update equivalence class map.
+                LogicalVariable lhs = assignOp.getVariables().get(i);
+                LogicalVariable rhs = rhsVarRefExpr.getVariableReference();
+                updateEquivalenceClassMap(lhs, rhs);
+            }
+        }
+
+        // Replace variable references with their first representative.
+        if (op.getOperatorTag() == LogicalOperatorTag.PROJECT) {
+            // The project operator does not use expressions, so we need to replace its variables manually.
+            if (replaceProjectVars((ProjectOperator) op)) {
+                modified = true;
+            }
+        } else if (op.getOperatorTag() == LogicalOperatorTag.UNIONALL) {
+            // Replace redundant variables manually in the UnionAll operator.
+            if (replaceUnionAllVars((UnionAllOperator) op)) {
+                modified = true;
+            }
+        } else {
+            if (op.acceptExpressionTransform(substVisitor)) {
+                modified = true;
+            }
+        }
+
+        // Perform variable replacement in nested plans.
+        if (op.hasNestedPlans()) {
+            AbstractOperatorWithNestedPlans opWithNestedPlan = (AbstractOperatorWithNestedPlans) op;
+            for (ILogicalPlan nestedPlan : opWithNestedPlan.getNestedPlans()) {
+                for (Mutable<ILogicalOperator> rootRef : nestedPlan.getRoots()) {
+                    if (removeRedundantVariables(rootRef, context)) {
+                        modified = true;
+                    }
+                }
+            }
+        }
+
+        // Deal with re-mapping of variables in group by.
+        if (op.getOperatorTag() == LogicalOperatorTag.GROUP) {
+            if (handleGroupByVarRemapping((GroupByOperator) op)) {
+                modified = true;
+            }
+        }
+
+        if (modified) {
+            context.computeAndSetTypeEnvironmentForOperator(op);
+            context.addToDontApplySet(this, op);
+        }
+        return modified;
+    }
+
+    private boolean handleGroupByVarRemapping(GroupByOperator groupOp) {
+        boolean modified = false;
+        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> gp : groupOp.getGroupByList()) {
+            if (gp.first == null || gp.second.getValue().getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+                continue;
+            }
+            LogicalVariable groupByVar = ((VariableReferenceExpression) gp.second.getValue()).getVariableReference();
+            Iterator<Pair<LogicalVariable, Mutable<ILogicalExpression>>> iter = groupOp.getDecorList().iterator();
+            while (iter.hasNext()) {
+                Pair<LogicalVariable, Mutable<ILogicalExpression>> dp = iter.next();
+                if (dp.first != null || dp.second.getValue().getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+                    continue;
+                }
+                LogicalVariable dv = ((VariableReferenceExpression) dp.second.getValue()).getVariableReference();
+                if (dv == groupByVar) {
+                    // The decor variable is redundant, since it is propagated as a grouping variable.
+                    List<LogicalVariable> equivalentVars = equivalentVarsMap.get(groupByVar);
+                    if (equivalentVars != null) {
+                        // Change representative of this equivalence class.
+                        equivalentVars.set(0, gp.first);
+                        equivalentVarsMap.put(gp.first, equivalentVars);
+                    } else {
+                        updateEquivalenceClassMap(gp.first, groupByVar);
+                    }
+                    iter.remove();
+                    modified = true;
+                    break;
+                }
+            }
+        }
+        return modified;
+    }
+
+    /**
+     * Replace the project's variables with their corresponding representative
+     * from the equivalence class map (if any).
+     * We cannot use the VariableSubstitutionVisitor here because the project ops
+     * maintain their variables as a list and not as expressions.
+     */
+    private boolean replaceProjectVars(ProjectOperator op) throws AlgebricksException {
+        List<LogicalVariable> vars = op.getVariables();
+        int size = vars.size();
+        boolean modified = false;
+        for (int i = 0; i < size; i++) {
+            LogicalVariable var = vars.get(i);
+            List<LogicalVariable> equivalentVars = equivalentVarsMap.get(var);
+            if (equivalentVars == null) {
+                continue;
+            }
+            // Replace with equivalence class representative.
+            LogicalVariable representative = equivalentVars.get(0);
+            if (representative != var) {
+                vars.set(i, representative);
+                modified = true;
+            }
+        }
+        return modified;
+    }
+
+    private boolean replaceUnionAllVars(UnionAllOperator op) throws AlgebricksException {
+        boolean modified = false;
+        for (Triple<LogicalVariable, LogicalVariable, LogicalVariable> varMapping : op.getVariableMappings()) {
+            List<LogicalVariable> firstEquivalentVars = equivalentVarsMap.get(varMapping.first);
+            List<LogicalVariable> secondEquivalentVars = equivalentVarsMap.get(varMapping.second);
+            // Replace variables with their representative.
+            if (firstEquivalentVars != null) {
+                varMapping.first = firstEquivalentVars.get(0);
+                modified = true;
+            }
+            if (secondEquivalentVars != null) {
+                varMapping.second = secondEquivalentVars.get(0);
+                modified = true;
+            }
+        }
+        return modified;
+    }
+
+    private class VariableSubstitutionVisitor implements ILogicalExpressionReferenceTransform {
+        @Override
+        public boolean transform(Mutable<ILogicalExpression> exprRef) {
+            ILogicalExpression e = exprRef.getValue();
+            switch (((AbstractLogicalExpression) e).getExpressionTag()) {
+                case VARIABLE: {
+                    // Replace variable references with their equivalent representative in the equivalence class map.
+                    VariableReferenceExpression varRefExpr = (VariableReferenceExpression) e;
+                    LogicalVariable var = varRefExpr.getVariableReference();
+                    List<LogicalVariable> equivalentVars = equivalentVarsMap.get(var);
+                    if (equivalentVars == null) {
+                        return false;
+                    }
+                    LogicalVariable representative = equivalentVars.get(0);
+                    if (representative != var) {
+                        varRefExpr.setVariable(representative);
+                        return true;
+                    }
+                    return false;
+                }
+                case FUNCTION_CALL: {
+                    AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) e;
+                    boolean modified = false;
+                    for (Mutable<ILogicalExpression> arg : funcExpr.getArguments()) {
+                        if (transform(arg)) {
+                            modified = true;
+                        }
+                    }
+                    return modified;
+                }
+                default: {
+                    return false;
+                }
+            }
+        }
+    }
+}
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java
index c53ea0a..e0c2741 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java
@@ -23,6 +23,7 @@
 import org.apache.commons.lang3.mutable.Mutable;
 
 import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
@@ -33,10 +34,14 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
 import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
 
+/**
+ * Removes unused variables from Assign, Unnest, Aggregate, and UnionAll operators.
+ */
 public class RemoveUnusedAssignAndAggregateRule implements IAlgebraicRewriteRule {
 
     @Override
@@ -55,7 +60,7 @@
         if (smthToRemove) {
             removeUnusedAssigns(opRef, toRemove, context);
         }
-        return smthToRemove;
+        return !toRemove.isEmpty();
     }
 
     private void removeUnusedAssigns(Mutable<ILogicalOperator> opRef, Set<LogicalVariable> toRemove,
@@ -87,28 +92,59 @@
 
     private int removeFromAssigns(AbstractLogicalOperator op, Set<LogicalVariable> toRemove,
             IOptimizationContext context) throws AlgebricksException {
-        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-            AssignOperator assign = (AssignOperator) op;
-            if (removeUnusedVarsAndExprs(toRemove, assign.getVariables(), assign.getExpressions())) {
-                context.computeAndSetTypeEnvironmentForOperator(assign);
+        switch (op.getOperatorTag()) {
+            case ASSIGN: {
+                AssignOperator assign = (AssignOperator) op;
+                if (removeUnusedVarsAndExprs(toRemove, assign.getVariables(), assign.getExpressions())) {
+                    context.computeAndSetTypeEnvironmentForOperator(assign);
+                }
+                return assign.getVariables().size();
             }
-            return assign.getVariables().size();
-        } else if (op.getOperatorTag() == LogicalOperatorTag.AGGREGATE) {
-            AggregateOperator agg = (AggregateOperator) op;
-            if (removeUnusedVarsAndExprs(toRemove, agg.getVariables(), agg.getExpressions())) {
-                context.computeAndSetTypeEnvironmentForOperator(agg);
+            case AGGREGATE: {
+                AggregateOperator agg = (AggregateOperator) op;
+                if (removeUnusedVarsAndExprs(toRemove, agg.getVariables(), agg.getExpressions())) {
+                    context.computeAndSetTypeEnvironmentForOperator(agg);
+                }
+                return agg.getVariables().size();
             }
-            return agg.getVariables().size();
-        } else if (op.getOperatorTag() == LogicalOperatorTag.UNNEST) {
-            UnnestOperator uOp = (UnnestOperator) op;
-            LogicalVariable pVar = uOp.getPositionalVariable();
-            if (pVar != null && toRemove.contains(pVar)) {
-                uOp.setPositionalVariable(null);
+            case UNNEST: {
+                UnnestOperator uOp = (UnnestOperator) op;
+                LogicalVariable pVar = uOp.getPositionalVariable();
+                if (pVar != null && toRemove.contains(pVar)) {
+                    uOp.setPositionalVariable(null);
+                }
+                break;
+            }
+            case UNIONALL: {
+                UnionAllOperator unionOp = (UnionAllOperator) op;
+                if (removeUnusedVarsFromUnionAll(unionOp, toRemove)) {
+                    context.computeAndSetTypeEnvironmentForOperator(unionOp);
+                }
+                return unionOp.getVariableMappings().size();
             }
         }
         return -1;
     }
 
+    private boolean removeUnusedVarsFromUnionAll(UnionAllOperator unionOp, Set<LogicalVariable> toRemove) {
+        Iterator<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> iter = unionOp.getVariableMappings()
+                .iterator();
+        boolean modified = false;
+        Set<LogicalVariable> removeFromRemoveSet = new HashSet<LogicalVariable>();
+        while (iter.hasNext()) {
+            Triple<LogicalVariable, LogicalVariable, LogicalVariable> varMapping = iter.next();
+            if (toRemove.contains(varMapping.third)) {
+                iter.remove();
+                modified = true;
+            }
+            // In any case, make sure we do not remove these variables.
+            removeFromRemoveSet.add(varMapping.first);
+            removeFromRemoveSet.add(varMapping.second);
+        }
+        toRemove.removeAll(removeFromRemoveSet);
+        return modified;
+    }
+
     private boolean removeUnusedVarsAndExprs(Set<LogicalVariable> toRemove, List<LogicalVariable> varList,
             List<Mutable<ILogicalExpression>> exprList) {
         boolean changed = false;
@@ -142,22 +178,41 @@
                 }
             }
         }
-        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-            AssignOperator assign = (AssignOperator) op;
-            toRemove.addAll(assign.getVariables());
-        } else if (op.getOperatorTag() == LogicalOperatorTag.AGGREGATE) {
-            AggregateOperator agg = (AggregateOperator) op;
-            toRemove.addAll(agg.getVariables());
-        } else if (op.getOperatorTag() == LogicalOperatorTag.UNNEST) {
-            UnnestOperator uOp = (UnnestOperator) op;
-            LogicalVariable pVar = uOp.getPositionalVariable();
-            if (pVar != null) {
-                toRemove.add(pVar);
+        boolean removeUsedVars = true;
+        switch (op.getOperatorTag()) {
+            case ASSIGN: {
+                AssignOperator assign = (AssignOperator) op;
+                toRemove.addAll(assign.getVariables());
+                break;
+            }
+            case AGGREGATE: {
+                AggregateOperator agg = (AggregateOperator) op;
+                toRemove.addAll(agg.getVariables());
+                break;
+            }
+            case UNNEST: {
+                UnnestOperator uOp = (UnnestOperator) op;
+                LogicalVariable pVar = uOp.getPositionalVariable();
+                if (pVar != null) {
+                    toRemove.add(pVar);
+                }
+                break;
+            }
+            case UNIONALL: {
+                UnionAllOperator unionOp = (UnionAllOperator) op;
+                for (Triple<LogicalVariable, LogicalVariable, LogicalVariable> varMapping : unionOp
+                        .getVariableMappings()) {
+                    toRemove.add(varMapping.third);
+                }
+                removeUsedVars = false;
+                break;
             }
         }
-        List<LogicalVariable> used = new LinkedList<LogicalVariable>();
-        VariableUtilities.getUsedVariables(op, used);
-        toRemove.removeAll(used);
+        if (removeUsedVars) {
+            List<LogicalVariable> used = new LinkedList<LogicalVariable>();
+            VariableUtilities.getUsedVariables(op, used);
+            toRemove.removeAll(used);
+        }
     }
 
 }
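
For reference, a self-contained sketch of the UNIONALL pruning contract above, using stand-in String variables and a local Triple class rather than the Algebricks types: a mapping is dropped only when its output variable (third) is unused, while the input variables (first, second) of every mapping are protected from later removal.

import java.util.*;

public class UnionAllPruneSketch {
    // Minimal stand-in for Algebricks' Triple<LogicalVariable, LogicalVariable, LogicalVariable>
    static final class Triple {
        final String first, second, third;
        Triple(String first, String second, String third) {
            this.first = first;
            this.second = second;
            this.third = third;
        }
    }

    // Mirrors removeUnusedVarsFromUnionAll: drop mappings whose output variable is
    // unused, then protect all input variables from being removed by later passes.
    static boolean prune(List<Triple> mappings, Set<String> toRemove) {
        boolean modified = false;
        Set<String> protectedVars = new HashSet<String>();
        for (Iterator<Triple> it = mappings.iterator(); it.hasNext();) {
            Triple m = it.next();
            if (toRemove.contains(m.third)) {
                it.remove();
                modified = true;
            }
            protectedVars.add(m.first);
            protectedVars.add(m.second);
        }
        toRemove.removeAll(protectedVars);
        return modified;
    }

    public static void main(String[] args) {
        List<Triple> mappings = new ArrayList<Triple>();
        mappings.add(new Triple("$a", "$b", "$c"));
        mappings.add(new Triple("$d", "$e", "$f"));
        Set<String> toRemove = new HashSet<String>(Arrays.asList("$f", "$a"));
        prune(mappings, toRemove);
        System.out.println(mappings.size()); // 1: only ($a,$b,$c) survives
        System.out.println(toRemove);        // [$f]: $a is protected as a union input
    }
}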
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java
index 38cf96e..60a4fbb 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java
@@ -38,6 +38,7 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.AggregatePOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.AssignPOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.DataSourceScanPOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.DistributeResultPOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.EmptyTupleSourcePOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.ExternalGroupByPOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.InMemoryStableSortPOperator;
@@ -245,6 +246,10 @@
                     op.setPhysicalOperator(new SinkWritePOperator());
                     break;
                 }
+                case DISTRIBUTE_RESULT: {
+                    op.setPhysicalOperator(new DistributeResultPOperator());
+                    break;
+                }
                 case WRITE_RESULT: {
                     WriteResultOperator opLoad = (WriteResultOperator) op;
                     LogicalVariable payload;
@@ -267,8 +272,8 @@
                     List<LogicalVariable> secondaryKeys = new ArrayList<LogicalVariable>();
                     getKeys(opInsDel.getPrimaryKeyExpressions(), primaryKeys);
                     getKeys(opInsDel.getSecondaryKeyExpressions(), secondaryKeys);
-                    op.setPhysicalOperator(new IndexInsertDeletePOperator(primaryKeys, secondaryKeys, 
-                    		opInsDel.getFilterExpression(), opInsDel.getDataSourceIndex()));
+                    op.setPhysicalOperator(new IndexInsertDeletePOperator(primaryKeys, secondaryKeys, opInsDel
+                            .getFilterExpression(), opInsDel.getDataSourceIndex()));
                     break;
                 }
                 case SINK: {
diff --git a/algebricks/algebricks-runtime/pom.xml b/algebricks/algebricks-runtime/pom.xml
index e40dfb0..e438283 100644
--- a/algebricks/algebricks-runtime/pom.xml
+++ b/algebricks/algebricks-runtime/pom.xml
@@ -16,8 +16,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/serializer/ResultSerializerFactoryProvider.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/serializer/ResultSerializerFactoryProvider.java
new file mode 100644
index 0000000..4f28c81
--- /dev/null
+++ b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/serializer/ResultSerializerFactoryProvider.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.algebricks.runtime.serializer;
+
+import java.io.PrintStream;
+import java.nio.BufferOverflowException;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.data.IAWriter;
+import edu.uci.ics.hyracks.algebricks.data.IAWriterFactory;
+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
+import edu.uci.ics.hyracks.algebricks.data.IResultSerializerFactoryProvider;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.dataflow.value.IResultSerializer;
+import edu.uci.ics.hyracks.api.dataflow.value.IResultSerializerFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class ResultSerializerFactoryProvider implements IResultSerializerFactoryProvider {
+    private static final long serialVersionUID = 1L;
+
+    public static final ResultSerializerFactoryProvider INSTANCE = new ResultSerializerFactoryProvider();
+
+    private ResultSerializerFactoryProvider() {
+    }
+
+    @Override
+    public IResultSerializerFactory getAqlResultSerializerFactoryProvider(final int[] fields,
+            final IPrinterFactory[] printerFactories, final IAWriterFactory writerFactory) {
+        return new IResultSerializerFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public IResultSerializer createResultSerializer(RecordDescriptor inputRecordDesc, PrintStream printStream) {
+                final IAWriter writer = writerFactory.createWriter(fields, printStream, printerFactories,
+                        inputRecordDesc);
+
+                return new IResultSerializer() {
+                    private static final long serialVersionUID = 1L;
+
+                    @Override
+                    public void init() throws HyracksDataException {
+                        try {
+                            writer.init();
+                        } catch (AlgebricksException e) {
+                            throw new HyracksDataException(e);
+                        }
+                    }
+
+                    @Override
+                    public boolean appendTuple(IFrameTupleAccessor tAccess, int tIdx) throws HyracksDataException {
+                        try {
+                            writer.printTuple(tAccess, tIdx);
+                        } catch (BufferOverflowException e) {
+                            return false;
+                        } catch (AlgebricksException e) {
+                            throw new HyracksDataException(e);
+                        }
+                        return true;
+                    }
+                };
+            }
+        };
+    }
+}
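
A hedged usage sketch of the new provider: the IntegerPrinterFactory and PrinterBasedWriterFactory helpers are assumed from the sibling algebricks-data and algebricks-runtime modules, so treat those names as illustrative.

import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
import edu.uci.ics.hyracks.algebricks.data.impl.IntegerPrinterFactory;
import edu.uci.ics.hyracks.algebricks.runtime.serializer.ResultSerializerFactoryProvider;
import edu.uci.ics.hyracks.algebricks.runtime.writers.PrinterBasedWriterFactory;
import edu.uci.ics.hyracks.api.dataflow.value.IResultSerializerFactory;

public class ResultSerializerExample {
    public static void main(String[] args) {
        // Serialize field 0 of each tuple as an integer through a printer-based writer.
        int[] fields = new int[] { 0 };
        IPrinterFactory[] printers = new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE };
        IResultSerializerFactory factory = ResultSerializerFactoryProvider.INSTANCE
                .getAqlResultSerializerFactoryProvider(fields, printers, PrinterBasedWriterFactory.INSTANCE);
        // The factory is serializable and travels with the job specification; on a
        // node controller, createResultSerializer(inputRecordDesc, printStream)
        // yields an IResultSerializer whose appendTuple(...) returns false on
        // BufferOverflowException so the caller can flush the frame and retry.
    }
}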
diff --git a/algebricks/algebricks-tests/pom.xml b/algebricks/algebricks-tests/pom.xml
index 19e6711..c114881 100644
--- a/algebricks/algebricks-tests/pom.xml
+++ b/algebricks/algebricks-tests/pom.xml
@@ -16,9 +16,11 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
+          <encoding>UTF-8</encoding>
+        </configuration>
       </plugin>
       <plugin>
         <artifactId>maven-antrun-plugin</artifactId>
diff --git a/algebricks/algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java b/algebricks/algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
index b5c4b47..dad7cd0 100644
--- a/algebricks/algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
+++ b/algebricks/algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
@@ -3,9 +3,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
- * 
+ *
  *     http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -55,6 +55,7 @@
         ncConfig1.ccPort = TEST_HYRACKS_CC_CLUSTER_NET_PORT;
         ncConfig1.clusterNetIPAddress = "127.0.0.1";
         ncConfig1.dataIPAddress = "127.0.0.1";
+        ncConfig1.datasetIPAddress = "127.0.0.1";
         ncConfig1.nodeId = NC1_ID;
         nc1 = new NodeControllerService(ncConfig1);
         nc1.start();
@@ -64,6 +65,7 @@
         ncConfig2.ccPort = TEST_HYRACKS_CC_CLUSTER_NET_PORT;
         ncConfig2.clusterNetIPAddress = "127.0.0.1";
         ncConfig2.dataIPAddress = "127.0.0.1";
+        ncConfig2.datasetIPAddress = "127.0.0.1";
         ncConfig2.nodeId = NC2_ID;
         nc2 = new NodeControllerService(ncConfig2);
         nc2.start();
@@ -85,4 +87,4 @@
         hcc.waitForCompletion(jobId);
     }
 
-}
\ No newline at end of file
+}
diff --git a/hivesterix/HyracksCodeFormatProfile.xml b/hivesterix/HyracksCodeFormatProfile.xml
new file mode 100755
index 0000000..2cde66d
--- /dev/null
+++ b/hivesterix/HyracksCodeFormatProfile.xml
@@ -0,0 +1,279 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<profiles version="11">
+<profile kind="CodeFormatterProfile" name="HyracksCodeFormatProfile" version="11">
+<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_binary_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="2"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_binary_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/>
+<setting id="org.eclipse.jdt.core.compiler.source" value="1.5"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="120"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="0"/>
+<setting id="org.eclipse.jdt.core.compiler.problem.assertIdentifier" value="error"/>
+<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="space"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.compiler.problem.enumIdentifier" value="error"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="4"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
+<setting id="org.eclipse.jdt.core.compiler.compliance" value="1.5"/>
+<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="2"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode" value="enabled"/>
+<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="48"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="9999"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.wrap_before_binary_operator" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="1.5"/>
+<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_member" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="49"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
+</profile>
+</profiles>
diff --git a/hivesterix/hivesterix-common/pom.xml b/hivesterix/hivesterix-common/pom.xml
new file mode 100644
index 0000000..33d8fb3
--- /dev/null
+++ b/hivesterix/hivesterix-common/pom.xml
@@ -0,0 +1,61 @@
+<?xml version="1.0"?>
+<project
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+	xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>hivesterix-common</artifactId>
+	<name>hivesterix-common</name>
+
+	<parent>
+		<artifactId>hivesterix</artifactId>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+					<encoding>UTF-8</encoding>
+					<fork>true</fork>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<dependencies>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>algebricks-compiler</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-hdfs-core</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-exec</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/common/config/ConfUtil.java b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/common/config/ConfUtil.java
new file mode 100644
index 0000000..648deb6
--- /dev/null
+++ b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/common/config/ConfUtil.java
@@ -0,0 +1,186 @@
+package edu.uci.ics.hivesterix.common.config;
+
+import java.io.BufferedReader;
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.JobConf;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.topology.ClusterTopology;
+
+@SuppressWarnings({ "rawtypes", "deprecation" })
+public class ConfUtil {
+    private static final String clusterPropertiesPath = "conf/cluster.properties";
+    private static final String masterFilePath = "conf/master";
+
+    private static JobConf job;
+    private static HiveConf hconf;
+    private static String[] NCs;
+    private static Map<String, List<String>> ncMapping;
+    private static IHyracksClientConnection hcc = null;
+    private static ClusterTopology topology = null;
+    private static Properties clusterProps;
+    private static Map<String, NodeControllerInfo> ncNameToNcInfos;
+
+    public static JobConf getJobConf(Class<? extends InputFormat> format, Path path) {
+        JobConf conf = new JobConf();
+        if (job != null)
+            conf = job;
+
+        String hadoopPath = System.getProperty("HADOOP_HOME", "/hadoop");
+        Path pathCore = new Path(hadoopPath + "/conf/core-site.xml");
+        conf.addResource(pathCore);
+        Path pathMapRed = new Path(hadoopPath + "/conf/mapred-site.xml");
+        conf.addResource(pathMapRed);
+        Path pathHDFS = new Path(hadoopPath + "/conf/hdfs-site.xml");
+        conf.addResource(pathHDFS);
+
+        conf.setInputFormat(format);
+        FileInputFormat.setInputPaths(conf, path);
+        return conf;
+    }
+
+    public static JobConf getJobConf() {
+        JobConf conf = new JobConf();
+        if (job != null)
+            conf = job;
+
+        String hadoopPath = System.getProperty("HADOOP_HOME", "/hadoop");
+        Path pathCore = new Path(hadoopPath + "/conf/core-site.xml");
+        conf.addResource(pathCore);
+        Path pathMapRed = new Path(hadoopPath + "/conf/mapred-site.xml");
+        conf.addResource(pathMapRed);
+        Path pathHDFS = new Path(hadoopPath + "/conf/hdfs-site.xml");
+        conf.addResource(pathHDFS);
+
+        return conf;
+    }
+
+    public static void setJobConf(JobConf conf) {
+        job = conf;
+    }
+
+    public static void setHiveConf(HiveConf hiveConf) {
+        hconf = hiveConf;
+    }
+
+    public static HiveConf getHiveConf() {
+        if (hconf == null) {
+            hconf = new HiveConf(SessionState.class);
+            hconf.addResource(new Path("conf/hive-default.xml"));
+        }
+        return hconf;
+    }
+
+    public static String[] getNCs() throws HyracksException {
+        if (NCs == null) {
+            try {
+                loadClusterConfig();
+            } catch (Exception e) {
+                throw new HyracksException(e);
+            }
+        }
+        return NCs;
+    }
+
+    public static Map<String, List<String>> getNCMapping() throws HyracksException {
+        if (ncMapping == null) {
+            try {
+                loadClusterConfig();
+            } catch (Exception e) {
+                throw new HyracksException(e);
+            }
+        }
+        return ncMapping;
+    }
+
+    public static Map<String, NodeControllerInfo> getNodeControllerInfo() throws HyracksException {
+        if (ncNameToNcInfos == null) {
+            try {
+                loadClusterConfig();
+            } catch (Exception e) {
+                throw new HyracksException(e);
+            }
+        }
+        return ncNameToNcInfos;
+    }
+
+    private static void loadClusterConfig() {
+        try {
+            getHiveConf();
+
+            /**
+             * load the properties file if it is not loaded
+             */
+            if (clusterProps == null) {
+                clusterProps = new Properties();
+                InputStream confIn = new FileInputStream(clusterPropertiesPath);
+                clusterProps.load(confIn);
+                confIn.close();
+            }
+
+            if (hcc == null) {
+                BufferedReader ipReader = new BufferedReader(new InputStreamReader(new FileInputStream(masterFilePath)));
+                String masterNode = ipReader.readLine();
+                ipReader.close();
+
+                InetAddress[] ips = InetAddress.getAllByName(masterNode);
+                int port = Integer.parseInt(clusterProps.getProperty("CC_CLIENTPORT"));
+                for (InetAddress ip : ips) {
+                    if (ip.getAddress().length <= 4) {
+                        try {
+                            hcc = new HyracksConnection(ip.getHostAddress(), port);
+                            break;
+                        } catch (Exception e) {
+                            continue;
+                        }
+                    }
+                }
+            }
+
+            int mpl = Integer.parseInt(hconf.get("hive.hyracks.parrallelism"));
+            topology = hcc.getClusterTopology();
+            ncNameToNcInfos = hcc.getNodeControllerInfos();
+            NCs = new String[ncNameToNcInfos.size() * mpl];
+            ncMapping = new HashMap<String, List<String>>();
+            int i = 0;
+            for (Map.Entry<String, NodeControllerInfo> entry : ncNameToNcInfos.entrySet()) {
+                String ipAddr = InetAddress.getByAddress(entry.getValue().getNetworkAddress().getIpAddress())
+                        .getHostAddress();
+                List<String> matchedNCs = ncMapping.get(ipAddr);
+                if (matchedNCs == null) {
+                    matchedNCs = new ArrayList<String>();
+                    ncMapping.put(ipAddr, matchedNCs);
+                }
+                matchedNCs.add(entry.getKey());
+                for (int j = i * mpl; j < i * mpl + mpl; j++)
+                    NCs[j] = entry.getKey();
+                i++;
+            }
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    public static ClusterTopology getClusterTopology() {
+        if (topology == null)
+            loadClusterConfig();
+        return topology;
+    }
+}
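
For context, a minimal usage sketch of ConfUtil (a hypothetical Main class, assuming conf/cluster.properties, conf/master, and conf/hive-default.xml exist in the working directory, and that a Hyracks cluster controller is reachable):

import java.util.List;
import java.util.Map;

import org.apache.hadoop.mapred.JobConf;

import edu.uci.ics.hivesterix.common.config.ConfUtil;
import edu.uci.ics.hyracks.api.client.NodeControllerInfo;

public class ConfUtilExample {
    public static void main(String[] args) throws Exception {
        // Hadoop conf assembled from the *-site.xml files under the
        // HADOOP_HOME system property (default "/hadoop").
        JobConf conf = ConfUtil.getJobConf();
        // One logical partition per NC, each repeated "hive.hyracks.parrallelism"
        // (sic, as spelled in ConfUtil) times.
        String[] ncs = ConfUtil.getNCs();
        Map<String, NodeControllerInfo> infos = ConfUtil.getNodeControllerInfo();
        Map<String, List<String>> byIp = ConfUtil.getNCMapping();
        System.out.println(ncs.length + " partitions over " + infos.size()
                + " node controllers on " + byIp.size() + " machines");
    }
}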

diff --git a/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/ExpressionConstant.java b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/ExpressionConstant.java
new file mode 100644
index 0000000..8fb715b
--- /dev/null
+++ b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/ExpressionConstant.java
@@ -0,0 +1,24 @@
+package edu.uci.ics.hivesterix.logical.expression;
+
+/**
+ * some constants for expression
+ *
+ * @author yingyib
+ */
+public class ExpressionConstant {
+
+    /**
+     * name space for function identifier
+     */
+    public static String NAMESPACE = "hive";
+
+    /**
+     * field expression: modeled as function in Algebricks
+     */
+    public static String FIELDACCESS = "fieldaccess";
+
+    /**
+     * null string: modeled as null in Algebricks
+     */
+    public static String NULL = "null";
+}
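
A tiny sketch of how these constants are read on the Algebricks side, where Hive field accesses and null literals become namespace-qualified function names:

import edu.uci.ics.hivesterix.logical.expression.ExpressionConstant;

public class ExpressionConstantExample {
    public static void main(String[] args) {
        // A Hive field access is modeled as the function "hive:fieldaccess",
        // a null literal as "hive:null".
        System.out.println(ExpressionConstant.NAMESPACE + ":" + ExpressionConstant.FIELDACCESS);
        System.out.println(ExpressionConstant.NAMESPACE + ":" + ExpressionConstant.NULL);
    }
}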

diff --git a/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/ExpressionTranslator.java b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/ExpressionTranslator.java
new file mode 100644
index 0000000..662ed83
--- /dev/null
+++ b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/ExpressionTranslator.java
@@ -0,0 +1,209 @@
+package edu.uci.ics.hivesterix.logical.expression;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
+import org.apache.hadoop.hive.ql.plan.UDTFDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+
+public class ExpressionTranslator {
+
+    public static Object getHiveExpression(ILogicalExpression expr, IVariableTypeEnvironment env) throws Exception {
+        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            /**
+             * function expression
+             */
+            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+            IFunctionInfo funcInfo = funcExpr.getFunctionInfo();
+            FunctionIdentifier fid = funcInfo.getFunctionIdentifier();
+
+            if (fid.getName().equals(ExpressionConstant.FIELDACCESS)) {
+                Object info = ((HiveFunctionInfo) funcInfo).getInfo();
+                ExprNodeFieldDesc desc = (ExprNodeFieldDesc) info;
+                return new ExprNodeFieldDesc(desc.getTypeInfo(), desc.getDesc(), desc.getFieldName(), desc.getIsList());
+            }
+
+            if (fid.getName().equals(ExpressionConstant.NULL)) {
+                return new ExprNodeNullDesc();
+            }
+
+            /**
+             * argument expressions: translate argument expressions recursively
+             * first; this logic is shared by scalar, aggregation and unnesting
+             * functions
+             */
+            List<Mutable<ILogicalExpression>> arguments = funcExpr.getArguments();
+            List<ExprNodeDesc> parameters = new ArrayList<ExprNodeDesc>();
+            for (Mutable<ILogicalExpression> argument : arguments) {
+                /**
+                 * parameters cannot be aggregate function descriptors
+                 */
+                ExprNodeDesc parameter = (ExprNodeDesc) getHiveExpression(argument.getValue(), env);
+                parameters.add(parameter);
+            }
+
+            /**
+             * get expression
+             */
+            if (funcExpr instanceof ScalarFunctionCallExpression) {
+                String udfName = HiveAlgebricksBuiltInFunctionMap.INSTANCE.getHiveFunctionName(fid);
+                GenericUDF udf;
+                if (udfName != null) {
+                    /**
+                     * get the corresponding function info for built-in functions
+                     */
+                    FunctionInfo fInfo = FunctionRegistry.getFunctionInfo(udfName);
+                    udf = fInfo.getGenericUDF();
+
+                    int inputSize = parameters.size();
+                    List<ExprNodeDesc> currentDescs = new ArrayList<ExprNodeDesc>();
+
+                    // generate a binary expression tree if necessary
+                    while (inputSize > 2) {
+                        int pairs = inputSize / 2;
+                        for (int i = 0; i < pairs; i++) {
+                            List<ExprNodeDesc> descs = new ArrayList<ExprNodeDesc>();
+                            descs.add(parameters.get(2 * i));
+                            descs.add(parameters.get(2 * i + 1));
+                            ExprNodeDesc desc = ExprNodeGenericFuncDesc.newInstance(udf, descs);
+                            currentDescs.add(desc);
+                        }
+
+                        if (inputSize % 2 != 0) {
+                            // carry the odd parameter over to the next round
+                            currentDescs.add(parameters.get(inputSize - 1));
+                        }
+                        inputSize = currentDescs.size();
+                        parameters.clear();
+                        parameters.addAll(currentDescs);
+                        currentDescs.clear();
+                    }
+
+                } else {
+                    Object secondInfo = ((HiveFunctionInfo) funcInfo).getInfo();
+                    if (secondInfo != null) {
+
+                        /**
+                         * for GenericUDFBridge: we should not ask this Hive
+                         * expression for its type, because the parameters may
+                         * have been changed!
+                         */
+                        ExprNodeGenericFuncDesc hiveExpr = (ExprNodeGenericFuncDesc) ((HiveFunctionInfo) funcInfo)
+                                .getInfo();
+                        udf = hiveExpr.getGenericUDF();
+                    } else {
+                        /**
+                         * for other generic UDFs
+                         */
+                        Class<?> udfClass;
+                        try {
+                            udfClass = Class.forName(fid.getName());
+                            udf = (GenericUDF) udfClass.newInstance();
+                        } catch (Exception e) {
+                            e.printStackTrace();
+                            throw new AlgebricksException(e.getMessage());
+                        }
+                    }
+                }
+                /**
+                 * get the hive generic function expression
+                 */
+                ExprNodeDesc desc = ExprNodeGenericFuncDesc.newInstance(udf, parameters);
+                return desc;
+            } else if (funcExpr instanceof AggregateFunctionCallExpression) {
+                /**
+                 * hive aggregation info
+                 */
+                AggregationDesc aggregateDesc = (AggregationDesc) ((HiveFunctionInfo) funcExpr.getFunctionInfo())
+                        .getInfo();
+                /**
+                 * set parameters
+                 */
+                aggregateDesc.setParameters((ArrayList<ExprNodeDesc>) parameters);
+
+                List<TypeInfo> originalParameterTypeInfos = new ArrayList<TypeInfo>();
+                for (ExprNodeDesc parameter : parameters) {
+                    if (parameter.getTypeInfo() instanceof StructTypeInfo) {
+                        originalParameterTypeInfos.add(TypeInfoFactory.doubleTypeInfo);
+                    } else
+                        originalParameterTypeInfos.add(parameter.getTypeInfo());
+                }
+
+                GenericUDAFEvaluator eval = FunctionRegistry.getGenericUDAFEvaluator(

+                        aggregateDesc.getGenericUDAFName(), originalParameterTypeInfos, aggregateDesc.getDistinct(),

+                        false);

+

+                AggregationDesc newAggregateDesc = new AggregationDesc(aggregateDesc.getGenericUDAFName(), eval,

+                        aggregateDesc.getParameters(), aggregateDesc.getDistinct(), aggregateDesc.getMode());

+                return newAggregateDesc;

+            } else if (funcExpr instanceof UnnestingFunctionCallExpression) {

+                /**

+                 * type inference for UDTF function

+                 */

+                UDTFDesc hiveDesc = (UDTFDesc) ((HiveFunctionInfo) funcExpr.getFunctionInfo()).getInfo();

+                String funcName = hiveDesc.getUDTFName();

+                FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcName);

+                GenericUDTF udtf = fi.getGenericUDTF();

+                UDTFDesc desc = new UDTFDesc(udtf);

+                return desc;

+            } else {

+                throw new IllegalStateException("unrecognized function expression " + expr.getClass().getName());

+            }

+        } else if ((expr.getExpressionTag() == LogicalExpressionTag.VARIABLE)) {

+            /**

+             * get type for variable in the environment

+             */

+            VariableReferenceExpression varExpr = (VariableReferenceExpression) expr;

+            LogicalVariable var = varExpr.getVariableReference();

+            TypeInfo typeInfo = (TypeInfo) env.getVarType(var);

+            ExprNodeDesc desc = new ExprNodeColumnDesc(typeInfo, var.toString(), "", false);

+            return desc;

+        } else if ((expr.getExpressionTag() == LogicalExpressionTag.CONSTANT)) {

+            /**

+             * get expression for constant in the environment

+             */

+            ConstantExpression varExpr = (ConstantExpression) expr;

+            Object value = ((HivesterixConstantValue) varExpr.getValue()).getObject();

+            ExprNodeDesc desc = new ExprNodeConstantDesc(value);

+            return desc;

+        } else {

+            throw new IllegalStateException("illegal expressions " + expr.getClass().getName());

+        }

+    }

+}
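
Aside (illustration only, not part of this patch): the while loop above rewrites an n-ary call to a binary built-in such as "and" into a tree of binary calls, combining arguments two at a time and carrying any odd argument into the next round until at most two operands remain. A minimal standalone sketch of the same folding, using plain strings in place of ExprNodeDesc (the class name PairwiseFoldSketch is hypothetical):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class PairwiseFoldSketch {
    // fold op(a1, ..., an) into nested binary calls, round by round
    static String fold(String op, List<String> args) {
        List<String> current = new ArrayList<String>(args);
        while (current.size() > 2) {
            List<String> next = new ArrayList<String>();
            int pairs = current.size() / 2;
            for (int i = 0; i < pairs; i++) {
                next.add(op + "(" + current.get(2 * i) + ", " + current.get(2 * i + 1) + ")");
            }
            if (current.size() % 2 != 0) {
                next.add(current.get(current.size() - 1)); // odd argument carried over
            }
            current = next;
        }
        return current.size() == 2 ? op + "(" + current.get(0) + ", " + current.get(1) + ")" : current.get(0);
    }

    public static void main(String[] args) {
        // prints and(and(and(a, b), and(c, d)), e)
        System.out.println(fold("and", Arrays.asList("a", "b", "c", "d", "e")));
    }
}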

diff --git a/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveAlgebricksBuiltInFunctionMap.java b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveAlgebricksBuiltInFunctionMap.java
new file mode 100644
index 0000000..56890eb
--- /dev/null
+++ b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveAlgebricksBuiltInFunctionMap.java
@@ -0,0 +1,82 @@
+package edu.uci.ics.hivesterix.logical.expression;
+
+import java.util.HashMap;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class HiveAlgebricksBuiltInFunctionMap {
+
+    /**
+     * singleton instance of the Hive-Algebricks built-in function map
+     */
+    public static HiveAlgebricksBuiltInFunctionMap INSTANCE = new HiveAlgebricksBuiltInFunctionMap();
+
+    /**
+     * hive to Algebricks function name mapping
+     */
+    private HashMap<String, FunctionIdentifier> hiveToAlgebricksMap = new HashMap<String, FunctionIdentifier>();
+
+    /**
+     * Algebricks to hive function name mapping
+     */
+    private HashMap<FunctionIdentifier, String> algebricksToHiveMap = new HashMap<FunctionIdentifier, String>();
+
+    /**
+     * build the bi-directional mapping between hive functions and Algebricks
+     * functions
+     */
+    private HiveAlgebricksBuiltInFunctionMap() {
+        hiveToAlgebricksMap.put("and", AlgebricksBuiltinFunctions.AND);
+        hiveToAlgebricksMap.put("or", AlgebricksBuiltinFunctions.OR);
+        hiveToAlgebricksMap.put("!", AlgebricksBuiltinFunctions.NOT);
+        hiveToAlgebricksMap.put("not", AlgebricksBuiltinFunctions.NOT);
+        hiveToAlgebricksMap.put("=", AlgebricksBuiltinFunctions.EQ);
+        hiveToAlgebricksMap.put("<>", AlgebricksBuiltinFunctions.NEQ);
+        hiveToAlgebricksMap.put(">", AlgebricksBuiltinFunctions.GT);
+        hiveToAlgebricksMap.put("<", AlgebricksBuiltinFunctions.LT);
+        hiveToAlgebricksMap.put(">=", AlgebricksBuiltinFunctions.GE);
+        hiveToAlgebricksMap.put("<=", AlgebricksBuiltinFunctions.LE);
+
+        algebricksToHiveMap.put(AlgebricksBuiltinFunctions.AND, "and");
+        algebricksToHiveMap.put(AlgebricksBuiltinFunctions.OR, "or");
+        // "!" and "not" both map to NOT above; the reverse map keeps "not"
+        algebricksToHiveMap.put(AlgebricksBuiltinFunctions.NOT, "not");
+        algebricksToHiveMap.put(AlgebricksBuiltinFunctions.EQ, "=");
+        algebricksToHiveMap.put(AlgebricksBuiltinFunctions.NEQ, "<>");
+        algebricksToHiveMap.put(AlgebricksBuiltinFunctions.GT, ">");
+        algebricksToHiveMap.put(AlgebricksBuiltinFunctions.LT, "<");
+        algebricksToHiveMap.put(AlgebricksBuiltinFunctions.GE, ">=");
+        algebricksToHiveMap.put(AlgebricksBuiltinFunctions.LE, "<=");
+    }
+
+    /**
+     * get hive function name from Algebricks function identifier
+     * 
+     * @param algebricksId
+     * @return the hive function name, or null if there is no mapping
+     */
+    public String getHiveFunctionName(FunctionIdentifier algebricksId) {
+        return algebricksToHiveMap.get(algebricksId);
+    }
+
+    /**
+     * get the built-in function identifier corresponding to a hive UDF or
+     * GenericUDF class, resolved through its Description annotation
+     * 
+     * @param funcClass
+     * @return function identifier, or null if the class is not annotated
+     */
+    public FunctionIdentifier getAlgebricksFunctionId(Class<?> funcClass) {
+        Description annotation = funcClass.getAnnotation(Description.class);
+        if (annotation == null) {
+            return null;
+        }
+        return hiveToAlgebricksMap.get(annotation.name());
+    }
+}
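
Aside (illustration only, not part of this patch): a usage sketch of the map above. It relies on the fact that getAlgebricksFunctionId resolves a class through its Description annotation; the sketch assumes the bundled Hive's GenericUDFOPAnd class is annotated with name "and", and the class name FunctionMapUsage is hypothetical.

import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;

import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;

// assumed to live in the same package as HiveAlgebricksBuiltInFunctionMap
public class FunctionMapUsage {
    public static void main(String[] args) {
        HiveAlgebricksBuiltInFunctionMap map = HiveAlgebricksBuiltInFunctionMap.INSTANCE;
        // Hive class -> Algebricks identifier, via @Description(name = "and")
        FunctionIdentifier fid = map.getAlgebricksFunctionId(GenericUDFOPAnd.class);
        System.out.println(fid == AlgebricksBuiltinFunctions.AND); // expected: true
        // Algebricks identifier -> Hive name, usable for FunctionRegistry lookups
        System.out.println(map.getHiveFunctionName(AlgebricksBuiltinFunctions.EQ)); // "="
    }
}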

diff --git a/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveExpressionTypeComputer.java b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveExpressionTypeComputer.java
new file mode 100644
index 0000000..e10e8c1
--- /dev/null
+++ b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveExpressionTypeComputer.java
@@ -0,0 +1,179 @@
+package edu.uci.ics.hivesterix.logical.expression;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.UDTFDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class HiveExpressionTypeComputer implements IExpressionTypeComputer {
+
+    public static IExpressionTypeComputer INSTANCE = new HiveExpressionTypeComputer();
+
+    @Override
+    public Object getType(ILogicalExpression expr, IMetadataProvider<?, ?> metadataProvider,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            /**
+             * function expression
+             */
+            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+            IFunctionInfo funcInfo = funcExpr.getFunctionInfo();
+
+            /**
+             * argument expressions, types, object inspectors
+             */
+            List<Mutable<ILogicalExpression>> arguments = funcExpr.getArguments();
+            List<TypeInfo> argumentTypes = new ArrayList<TypeInfo>();
+
+            /**
+             * get types of arguments
+             */
+            for (Mutable<ILogicalExpression> argument : arguments) {
+                TypeInfo type = (TypeInfo) getType(argument.getValue(), metadataProvider, env);
+                argumentTypes.add(type);
+            }
+
+            ObjectInspector[] childrenOIs = new ObjectInspector[argumentTypes.size()];
+
+            /**
+             * get object inspectors
+             */
+            for (int i = 0; i < argumentTypes.size(); i++) {
+                childrenOIs[i] = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(argumentTypes.get(i));
+            }
+
+            /**
+             * type inference for scalar function
+             */
+            if (funcExpr instanceof ScalarFunctionCallExpression) {
+                FunctionIdentifier algebricksId = funcInfo.getFunctionIdentifier();
+                Object functionInfo = ((HiveFunctionInfo) funcInfo).getInfo();
+                String udfName = HiveAlgebricksBuiltInFunctionMap.INSTANCE.getHiveFunctionName(algebricksId);
+                GenericUDF udf;
+                if (udfName != null) {
+                    /**
+                     * get corresponding function info for built-in functions
+                     */
+                    FunctionInfo fInfo = FunctionRegistry.getFunctionInfo(udfName);
+                    udf = fInfo.getGenericUDF();
+                } else if (functionInfo != null) {
+                    /**
+                     * for GenericUDFBridge: we should not call getType on this
+                     * hive expression, because parameters may have been
+                     * changed!
+                     */
+                    ExprNodeGenericFuncDesc hiveExpr = (ExprNodeGenericFuncDesc) functionInfo;
+                    udf = hiveExpr.getGenericUDF();
+                } else {
+                    /**
+                     * for other generic UDFs
+                     */
+                    Class<?> udfClass;
+                    try {
+                        udfClass = Class.forName(algebricksId.getName());
+                        udf = (GenericUDF) udfClass.newInstance();
+                    } catch (Exception e) {
+                        e.printStackTrace();
+                        throw new AlgebricksException(e.getMessage());
+                    }
+                }
+                /**
+                 * do the actual type inference
+                 */
+                ObjectInspector oi;
+                try {
+                    oi = udf.initialize(childrenOIs);
+                } catch (Exception e) {
+                    // propagate instead of continuing with a null inspector
+                    throw new AlgebricksException(e.getMessage());
+                }
+
+                TypeInfo exprType = TypeInfoUtils.getTypeInfoFromObjectInspector(oi);
+                return exprType;
+            } else if (funcExpr instanceof AggregateFunctionCallExpression) {
+                /**
+                 * hive aggregation info
+                 */
+                AggregationDesc aggregateDesc = (AggregationDesc) ((HiveFunctionInfo) funcExpr.getFunctionInfo())
+                        .getInfo();
+                /**
+                 * type inference for aggregation function
+                 */
+                GenericUDAFEvaluator result = aggregateDesc.getGenericUDAFEvaluator();
+
+                ObjectInspector returnOI;
+                try {
+                    returnOI = result.init(aggregateDesc.getMode(), childrenOIs);
+                } catch (HiveException e) {
+                    throw new AlgebricksException(e.getMessage());
+                }
+                TypeInfo exprType = TypeInfoUtils.getTypeInfoFromObjectInspector(returnOI);
+                return exprType;
+            } else if (funcExpr instanceof UnnestingFunctionCallExpression) {
+                /**
+                 * type inference for UDTF function
+                 */
+                UDTFDesc hiveDesc = (UDTFDesc) ((HiveFunctionInfo) funcExpr.getFunctionInfo()).getInfo();
+                GenericUDTF udtf = hiveDesc.getGenericUDTF();
+                ObjectInspector returnOI;
+                try {
+                    returnOI = udtf.initialize(childrenOIs);
+                } catch (HiveException e) {
+                    throw new AlgebricksException(e.getMessage());
+                }
+                TypeInfo exprType = TypeInfoUtils.getTypeInfoFromObjectInspector(returnOI);
+                return exprType;
+            } else {
+                throw new IllegalStateException("unrecognized function expression " + expr.getClass().getName());
+            }
+        } else if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+            /**
+             * get type for variable from the environment
+             */
+            VariableReferenceExpression varExpr = (VariableReferenceExpression) expr;
+            LogicalVariable var = varExpr.getVariableReference();
+            TypeInfo type = (TypeInfo) env.getVarType(var);
+            return type;
+        } else if (expr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+            /**
+             * get type for constant, from its java class
+             */
+            ConstantExpression constExpr = (ConstantExpression) expr;
+            HivesterixConstantValue value = (HivesterixConstantValue) constExpr.getValue();
+            TypeInfo type = TypeInfoFactory.getPrimitiveTypeInfoFromJavaPrimitive(value.getObject().getClass());
+            return type;
+        } else {
+            throw new IllegalStateException("illegal expressions " + expr.getClass().getName());
+        }
+    }
+}
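
Aside (illustration only, not part of this patch): the inference round trip above, condensed. Argument TypeInfos become writable ObjectInspectors, the UDF's initialize() call does the actual inference, and the returned inspector is converted back into a TypeInfo. The sketch assumes the bundled Hive registers "+" in its FunctionRegistry; the class name TypeInferenceSketch is hypothetical.

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeInferenceSketch {
    public static void main(String[] args) throws Exception {
        // argument types -> writable object inspectors
        ObjectInspector[] childrenOIs = new ObjectInspector[] {
                TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(TypeInfoFactory.intTypeInfo),
                TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(TypeInfoFactory.intTypeInfo) };
        // resolve the built-in "+" through the function registry
        GenericUDF udf = FunctionRegistry.getFunctionInfo("+").getGenericUDF();
        // initialize() performs the actual type inference
        ObjectInspector returnOI = udf.initialize(childrenOIs);
        TypeInfo returnType = TypeInfoUtils.getTypeInfoFromObjectInspector(returnOI);
        System.out.println(returnType); // expected: int
    }
}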

diff --git a/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveFunctionInfo.java b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveFunctionInfo.java
new file mode 100644
index 0000000..ced8d02
--- /dev/null
+++ b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveFunctionInfo.java
@@ -0,0 +1,36 @@
+package edu.uci.ics.hivesterix.logical.expression;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+
+public class HiveFunctionInfo implements IFunctionInfo, Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * primary function identifier
+     */
+    private transient FunctionIdentifier fid;
+
+    /**
+     * secondary function information: an arbitrary Hive-side descriptor
+     * payload (e.g., an ExprNodeGenericFuncDesc, AggregationDesc or UDTFDesc)
+     */
+    private transient Object secondaryFid;
+
+    public HiveFunctionInfo(FunctionIdentifier fid, Object secondFid) {
+        this.fid = fid;
+        this.secondaryFid = secondFid;
+    }
+
+    @Override
+    public FunctionIdentifier getFunctionIdentifier() {
+        return fid;
+    }
+
+    public Object getInfo() {
+        return secondaryFid;
+    }
+
+}
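
Aside (illustration only, not part of this patch): a small usage sketch of the two-level function info. The identifier gives the expression an Algebricks-side identity, while the "info" payload carries the Hive-side descriptor that the translators above cast back to the expected type. The class name FunctionInfoUsage and the "hive" namespace string are placeholders.

import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;

// assumed to live in the same package as HiveFunctionInfo
public class FunctionInfoUsage {
    public static void main(String[] args) {
        FunctionIdentifier fid = new FunctionIdentifier("hive", "concat");
        // no Hive-side payload attached here; the translators would pass a
        // descriptor such as an AggregationDesc or UDTFDesc instead of null
        HiveFunctionInfo info = new HiveFunctionInfo(fid, null);
        System.out.println(info.getFunctionIdentifier()); // the namespace/name identifier
        System.out.println(info.getInfo() == null);       // true
    }
}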

diff --git a/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveMergeAggregationExpressionFactory.java b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveMergeAggregationExpressionFactory.java
new file mode 100644
index 0000000..b77fe49
--- /dev/null
+++ b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveMergeAggregationExpressionFactory.java
@@ -0,0 +1,74 @@
+package edu.uci.ics.hivesterix.logical.expression;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IMergeAggregationExpressionFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+/**
+ * generate merge aggregation expression from an aggregation expression
+ * 
+ * @author yingyib
+ */
+public class HiveMergeAggregationExpressionFactory implements IMergeAggregationExpressionFactory {
+
+    public static IMergeAggregationExpressionFactory INSTANCE = new HiveMergeAggregationExpressionFactory();
+
+    @Override
+    public ILogicalExpression createMergeAggregation(ILogicalExpression expr, IOptimizationContext context)
+            throws AlgebricksException {
+        /**
+         * merge expressions are only defined for aggregate function calls
+         */
+        if (expr instanceof AggregateFunctionCallExpression) {
+            AggregateFunctionCallExpression funcExpr = (AggregateFunctionCallExpression) expr;
+            /**
+             * hive aggregation info
+             */
+            AggregationDesc aggregator = (AggregationDesc) ((HiveFunctionInfo) funcExpr.getFunctionInfo()).getInfo();
+            LogicalVariable inputVar = context.newVar();
+            ExprNodeDesc col = new ExprNodeColumnDesc(TypeInfoFactory.voidTypeInfo, inputVar.toString(), null, false);
+            ArrayList<ExprNodeDesc> parameters = new ArrayList<ExprNodeDesc>();
+            parameters.add(col);
+
+            GenericUDAFEvaluator.Mode mergeMode;
+            if (aggregator.getMode() == GenericUDAFEvaluator.Mode.PARTIAL1)
+                mergeMode = GenericUDAFEvaluator.Mode.PARTIAL2;
+            else if (aggregator.getMode() == GenericUDAFEvaluator.Mode.COMPLETE)
+                mergeMode = GenericUDAFEvaluator.Mode.FINAL;
+            else
+                mergeMode = aggregator.getMode();
+            AggregationDesc mergeDesc = new AggregationDesc(aggregator.getGenericUDAFName(),
+                    aggregator.getGenericUDAFEvaluator(), parameters, aggregator.getDistinct(), mergeMode);
+
+            String UDAFName = mergeDesc.getGenericUDAFName();
+            List<Mutable<ILogicalExpression>> arguments = new ArrayList<Mutable<ILogicalExpression>>();
+            arguments.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(inputVar)));
+
+            FunctionIdentifier funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, UDAFName + "("
+                    + mergeDesc.getMode() + ")");
+            HiveFunctionInfo funcInfo = new HiveFunctionInfo(funcId, mergeDesc);
+            AggregateFunctionCallExpression aggregationExpression = new AggregateFunctionCallExpression(funcInfo,
+                    false, arguments);
+            return aggregationExpression;
+        } else {
+            throw new IllegalStateException("illegal expressions " + expr.getClass().getName());
+        }
+    }
+
+}
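
Aside (illustration only, not part of this patch): the mode shift above in isolation. The merge side of a two-stage aggregate consumes partial aggregation buffers rather than raw rows, so PARTIAL1 (raw to partial) becomes PARTIAL2 (partial to partial) and COMPLETE (raw to full) becomes FINAL (partial to full). The class name MergeModeSketch is hypothetical.

import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;

public class MergeModeSketch {
    static Mode mergeModeFor(Mode local) {
        switch (local) {
            case PARTIAL1:
                return Mode.PARTIAL2; // raw -> partial becomes partial -> partial
            case COMPLETE:
                return Mode.FINAL; // raw -> full becomes partial -> full
            default:
                return local; // PARTIAL2 and FINAL already consume partials
        }
    }

    public static void main(String[] args) {
        System.out.println(mergeModeFor(Mode.PARTIAL1)); // PARTIAL2
        System.out.println(mergeModeFor(Mode.COMPLETE)); // FINAL
    }
}
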
diff --git a/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveNullableTypeComputer.java b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveNullableTypeComputer.java
new file mode 100644
index 0000000..906e3ce
--- /dev/null
+++ b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HiveNullableTypeComputer.java
@@ -0,0 +1,15 @@
+package edu.uci.ics.hivesterix.logical.expression;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.INullableTypeComputer;
+
+public class HiveNullableTypeComputer implements INullableTypeComputer {
+
+    public static INullableTypeComputer INSTANCE = new HiveNullableTypeComputer();
+
+    @Override
+    public Object makeNullableType(Object type) throws AlgebricksException {
+        return type;
+    }
+
+}
diff --git a/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivePartialAggregationTypeComputer.java b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivePartialAggregationTypeComputer.java
new file mode 100644
index 0000000..c74966c
--- /dev/null
+++ b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivePartialAggregationTypeComputer.java
@@ -0,0 +1,102 @@
+package edu.uci.ics.hivesterix.logical.expression;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IPartialAggregationTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class HivePartialAggregationTypeComputer implements IPartialAggregationTypeComputer {
+
+    public static IPartialAggregationTypeComputer INSTANCE = new HivePartialAggregationTypeComputer();
+
+    @Override
+    public Object getType(ILogicalExpression expr, IVariableTypeEnvironment env,
+            IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            IExpressionTypeComputer tc = HiveExpressionTypeComputer.INSTANCE;
+            /**
+             * function expression
+             */
+            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+
+            /**
+             * argument expressions, types, object inspectors
+             */
+            List<Mutable<ILogicalExpression>> arguments = funcExpr.getArguments();
+            List<TypeInfo> argumentTypes = new ArrayList<TypeInfo>();
+
+            /**
+             * get types of argument
+             */
+            for (Mutable<ILogicalExpression> argument : arguments) {
+                TypeInfo type = (TypeInfo) tc.getType(argument.getValue(), metadataProvider, env);
+                argumentTypes.add(type);
+            }
+
+            ObjectInspector[] childrenOIs = new ObjectInspector[argumentTypes.size()];
+
+            /**
+             * get object inspector
+             */
+            for (int i = 0; i < argumentTypes.size(); i++) {
+                childrenOIs[i] = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(argumentTypes.get(i));
+            }
+
+            /**
+             * type inference for aggregation function
+             */
+            if (funcExpr instanceof AggregateFunctionCallExpression) {
+                /**
+                 * hive aggregation info
+                 */
+                AggregationDesc aggregateDesc = (AggregationDesc) ((HiveFunctionInfo) funcExpr.getFunctionInfo())
+                        .getInfo();
+                /**
+                 * type inference for aggregation function
+                 */
+                GenericUDAFEvaluator result = aggregateDesc.getGenericUDAFEvaluator();
+
+                ObjectInspector returnOI;
+                try {
+                    returnOI = result.init(getPartialMode(aggregateDesc.getMode()), childrenOIs);
+                } catch (HiveException e) {
+                    // propagate instead of continuing with a null inspector
+                    throw new AlgebricksException(e.getMessage());
+                }
+                TypeInfo exprType = TypeInfoUtils.getTypeInfoFromObjectInspector(returnOI);
+                return exprType;
+            } else {
+                throw new IllegalStateException("illegal expressions " + expr.getClass().getName());
+            }
+        } else {
+            throw new IllegalStateException("illegal expressions " + expr.getClass().getName());
+        }
+    }
+
+    private Mode getPartialMode(Mode mode) {
+        Mode partialMode;
+        if (mode == Mode.FINAL)
+            partialMode = Mode.PARTIAL2;
+        else if (mode == Mode.COMPLETE)
+            partialMode = Mode.PARTIAL1;
+        else
+            partialMode = mode;
+        return partialMode;
+    }
+}
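
Aside (illustration only, not part of this patch): getPartialMode is the counterpart of the merge-mode shift sketched earlier. To type the partial output of an aggregate, the evaluator is initialized in the mode that produces partial buffers rather than the one that consumes them. The class name PartialModeSketch is hypothetical.

import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;

public class PartialModeSketch {
    static Mode partialModeFor(Mode mode) {
        switch (mode) {
            case FINAL:
                return Mode.PARTIAL2; // consumes partials, emits partials
            case COMPLETE:
                return Mode.PARTIAL1; // consumes raw rows, emits partials
            default:
                return mode; // PARTIAL1 and PARTIAL2 already emit partials
        }
    }

    public static void main(String[] args) {
        System.out.println(partialModeFor(Mode.FINAL));    // PARTIAL2
        System.out.println(partialModeFor(Mode.COMPLETE)); // PARTIAL1
    }
}
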
diff --git a/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivesterixConstantValue.java b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivesterixConstantValue.java
new file mode 100644
index 0000000..3d35e1f
--- /dev/null
+++ b/hivesterix/hivesterix-common/src/main/java/edu/uci/ics/hivesterix/logical/expression/HivesterixConstantValue.java
@@ -0,0 +1,55 @@
+package edu.uci.ics.hivesterix.logical.expression;
+
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
+
+public class HivesterixConstantValue implements IAlgebricksConstantValue {
+
+    private Object object;
+
+    public HivesterixConstantValue(Object object) {
+        this.setObject(object);
+    }
+
+    @Override
+    public boolean isFalse() {
+        return object == Boolean.FALSE;
+    }
+
+    @Override
+    public boolean isNull() {
+        return object == null;
+    }
+
+    @Override
+    public boolean isTrue() {
+        return object == Boolean.TRUE;
+    }
+
+    public void setObject(Object object) {
+        this.object = object;
+    }
+
+    public Object getObject() {
+        return object;
+    }
+
+    @Override
+    public String toString() {
+        // guard against the null constant, which isNull() explicitly supports
+        return object == null ? "null" : object.toString();
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (!(o instanceof HivesterixConstantValue)) {
+            return false;
+        }
+        HivesterixConstantValue v2 = (HivesterixConstantValue) o;
+        return object == null ? v2.getObject() == null : object.equals(v2.getObject());
+    }
+
+    @Override
+    public int hashCode() {
+        return object == null ? 0 : object.hashCode();
+    }
+
+}
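
Aside (illustration only, not part of this patch): isTrue() and isFalse() above compare by reference against the canonical Boolean.TRUE and Boolean.FALSE constants, so only canonically boxed booleans (autoboxing, Boolean.valueOf) are recognized. A sketch of the behavior, assumed to sit in the same package; the class name ConstantValueCaveat is hypothetical.

public class ConstantValueCaveat {
    public static void main(String[] args) {
        System.out.println(new HivesterixConstantValue(Boolean.TRUE).isTrue());      // true
        System.out.println(new HivesterixConstantValue(new Boolean(true)).isTrue()); // false: distinct instance
        System.out.println(new HivesterixConstantValue(null).isNull());              // true
    }
}
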
diff --git a/hivesterix/hivesterix-common/src/test/java/edu/uci/ics/hyracks/AppTest.java b/hivesterix/hivesterix-common/src/test/java/edu/uci/ics/hyracks/AppTest.java
new file mode 100644
index 0000000..0c701c8
--- /dev/null
+++ b/hivesterix/hivesterix-common/src/test/java/edu/uci/ics/hyracks/AppTest.java
@@ -0,0 +1,38 @@
+package edu.uci.ics.hyracks;
+
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+
+/**
+ * Unit test for simple App.
+ */
+public class AppTest 
+    extends TestCase
+{
+    /**
+     * Create the test case
+     *
+     * @param testName name of the test case
+     */
+    public AppTest( String testName )
+    {
+        super( testName );
+    }
+
+    /**
+     * @return the suite of tests being tested
+     */
+    public static Test suite()
+    {
+        return new TestSuite( AppTest.class );
+    }
+
+    /**
+     * Rigorous Test :-)
+     */
+    public void testApp()
+    {
+        assertTrue( true );
+    }
+}
diff --git a/hivesterix/hivesterix-dist/conf/cluster.properties b/hivesterix/hivesterix-dist/conf/cluster.properties
new file mode 100644
index 0000000..2d2401a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/conf/cluster.properties
@@ -0,0 +1,37 @@
+#The CC port for Hyracks clients
+CC_CLIENTPORT=3099
+
+#The CC port for Hyracks cluster management
+CC_CLUSTERPORT=1099
+
+#The directory of hyracks binaries
+HYRACKS_HOME=../../../../hyracks
+
+#The tmp directory for cc to install jars
+CCTMP_DIR=/tmp/t1
+
+#The tmp directory for nc to install jars
+NCTMP_DIR=/tmp/t2
+
+#The directory to put cc logs
+CCLOGS_DIR=$CCTMP_DIR/logs
+
+#The directory to put nc logs
+NCLOGS_DIR=$NCTMP_DIR/logs
+
+#Comma separated I/O directories for the spilling of external sort
+IO_DIRS="/tmp/t3,/tmp/t4"
+
+#The JAVA_HOME
+JAVA_HOME=$JAVA_HOME
+
+#The frame size of the internal dataflow engine
+FRAME_SIZE=65536
+
+#CC JAVA_OPTS
+CCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7001,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+# Yourkit option: -agentpath:/grid/0/dev/vborkar/tools/yjp-10.0.4/bin/linux-x86-64/libyjpagent.so=port=20001"
+
+#NC JAVA_OPTS
+NCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7002,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+
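
Aside (illustration only, not part of this patch): these values are consumed by the launch scripts, not by Java code; a hypothetical reader (class name ClusterPropertiesSketch) shows that java.util.Properties hands the values back verbatim, with no $VAR expansion and surrounding quotes intact.

import java.io.FileInputStream;
import java.util.Properties;

public class ClusterPropertiesSketch {
    public static void main(String[] args) throws Exception {
        Properties p = new Properties();
        try (FileInputStream in = new FileInputStream("conf/cluster.properties")) {
            p.load(in);
        }
        System.out.println(p.getProperty("CC_CLIENTPORT")); // 3099
        // $CCTMP_DIR is not expanded here; the shell scripts do that at launch time
        System.out.println(p.getProperty("CCLOGS_DIR"));    // $CCTMP_DIR/logs
    }
}
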
diff --git a/hivesterix/hivesterix-dist/conf/configuration.xsl b/hivesterix/hivesterix-dist/conf/configuration.xsl
new file mode 100644
index 0000000..377cdbe
--- /dev/null
+++ b/hivesterix/hivesterix-dist/conf/configuration.xsl
@@ -0,0 +1,24 @@
+<?xml version="1.0"?>
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
+<xsl:output method="html"/>
+<xsl:template match="configuration">
+<html>
+<body>
+<table border="1">
+<tr>
+ <td>name</td>
+ <td>value</td>
+ <td>description</td>
+</tr>
+<xsl:for-each select="property">
+<tr>
+  <td><a name="{name}"><xsl:value-of select="name"/></a></td>
+  <td><xsl:value-of select="value"/></td>
+  <td><xsl:value-of select="description"/></td>
+</tr>
+</xsl:for-each>
+</table>
+</body>
+</html>
+</xsl:template>
+</xsl:stylesheet>
diff --git a/hivesterix/hivesterix-dist/conf/debugnc.properties b/hivesterix/hivesterix-dist/conf/debugnc.properties
new file mode 100755
index 0000000..27afa26
--- /dev/null
+++ b/hivesterix/hivesterix-dist/conf/debugnc.properties
@@ -0,0 +1,12 @@
+#The tmp directory for nc to install jars
+NCTMP_DIR2=/tmp/t-1
+
+#The directory to put nc logs
+NCLOGS_DIR2=$NCTMP_DIR2/logs
+
+#Comma separated I/O directories for the spilling of external sort
+IO_DIRS2="/tmp/t-2,/tmp/t-3"
+
+#NC JAVA_OPTS
+NCJAVA_OPTS2="-Xdebug -Xrunjdwp:transport=dt_socket,address=7003,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+
diff --git a/hivesterix/hivesterix-dist/conf/hive-default.xml b/hivesterix/hivesterix-dist/conf/hive-default.xml
new file mode 100644
index 0000000..587eede
--- /dev/null
+++ b/hivesterix/hivesterix-dist/conf/hive-default.xml
@@ -0,0 +1,758 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+	<!-- Hive Configuration can either be stored in this file or in the hadoop 
+		configuration files -->
+	<!-- that are implied by Hadoop setup variables. -->
+	<!-- Aside from Hadoop setup variables - this file is provided as a convenience 
+		so that Hive -->
+	<!-- users do not have to edit hadoop configuration files (that may be managed 
+		as a centralized -->
+	<!-- resource). -->
+
+	<!-- Hive Execution Parameters -->
+	<property>
+		<name>mapred.reduce.tasks</name>
+		<value>-1</value>
+		<description>The default number of reduce tasks per job. Typically set
+			to a prime close to the number of available hosts. Ignored when
+			mapred.job.tracker is "local". Hadoop sets this to 1 by default,
+			whereas hive uses -1 as its default value. By setting this property
+			to -1, Hive will automatically figure out the number of reducers.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hyracks.connectorpolicy</name>
+		<value>PIPELINING</value>
+	</property>
+
+	<property>
+		<name>hive.hyracks.parrallelism</name>
+		<value>4</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.groupby.external</name>
+		<value>true</value>
+	</property>
+	
+	<property>
+		<name>hive.algebricks.groupby.external.memory</name>
+		<value>33554432</value>
+	</property>
+	
+	<property>
+		<name>hive.algebricks.sort.memory</name>
+		<value>33554432</value>
+	</property>
+
+	<property>
+		<name>hive.exec.reducers.bytes.per.reducer</name>
+		<value>1000000000</value>
+		<description>size per reducer. The default is 1G, i.e. if the input size
+			is 10G, it will use 10 reducers.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.reducers.max</name>
+		<value>999</value>
+		<description>max number of reducers that will be used. If the one
+			specified in the configuration parameter mapred.reduce.tasks is
+			negative, hive will use this one as the max number of reducers when
+			automatically determining the number of reducers.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.scratchdir</name>
+		<value>/hive-${user.name}</value>
+		<description>Scratch space for Hive jobs</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode</name>
+		<value>false</value>
+		<description>whether hive is running in test mode. If yes, it turns on
+			sampling and prefixes the output tablename</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode.prefix</name>
+		<value>test_</value>
+		<description>if hive is running in test mode, prefixes the output
+			table by this string</description>
+	</property>
+
+	<!-- If the input table is not bucketed, the denominator of the tablesample 
+		is determined by the parameter below -->
+	<!-- For example, the following query: -->
+	<!-- INSERT OVERWRITE TABLE dest -->
+	<!-- SELECT col1 from src -->
+	<!-- would be converted to -->
+	<!-- INSERT OVERWRITE TABLE test_dest -->
+	<!-- SELECT col1 from src TABLESAMPLE (BUCKET 1 out of 32 on rand(1)) -->
+	<property>
+		<name>hive.test.mode.samplefreq</name>
+		<value>32</value>
+		<description>if hive is running in test mode and table is not
+			bucketed, sampling frequency</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode.nosamplelist</name>
+		<value></value>
+		<description>if hive is running in test mode, don't sample the above
+			comma separated list of tables</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.local</name>
+		<value>true</value>
+		<description>controls whether to connect to a remote metastore server or
+			open a new metastore server in Hive Client JVM</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionURL</name>
+		<value>jdbc:derby:;databaseName=metastore_db;create=true</value>
+		<description>JDBC connect string for a JDBC metastore</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionDriverName</name>
+		<value>org.apache.derby.jdbc.EmbeddedDriver</value>
+		<description>Driver class name for a JDBC metastore</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.PersistenceManagerFactoryClass</name>
+		<value>org.datanucleus.jdo.JDOPersistenceManagerFactory</value>
+		<description>class implementing the jdo persistence</description>
+	</property>
+
+	<property>
+		<name>datanucleus.connectionPoolingType</name>
+		<value>DBCP</value>
+		<description>Uses a DBCP connection pool for JDBC metastore
+		</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.DetachAllOnCommit</name>
+		<value>true</value>
+		<description>detaches all objects from session so that they can be
+			used after transaction is committed</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.NonTransactionalRead</name>
+		<value>true</value>
+		<description>reads outside of transactions</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionUserName</name>
+		<value>APP</value>
+		<description>username to use against metastore database</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionPassword</name>
+		<value>mine</value>
+		<description>password to use against metastore database</description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateTables</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema </description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateColumns</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema </description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateConstraints</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema </description>
+	</property>
+
+	<property>
+		<name>datanucleus.storeManagerType</name>
+		<value>rdbms</value>
+		<description>metadata store type</description>
+	</property>
+
+	<property>
+		<name>datanucleus.autoCreateSchema</name>
+		<value>true</value>
+		<description>creates necessary schema on a startup if one doesn't
+			exist. set this to false, after creating it once</description>
+	</property>
+
+	<property>
+		<name>datanucleus.autoStartMechanismMode</name>
+		<value>checked</value>
+		<description>throw exception if metadata tables are incorrect
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.transactionIsolation</name>
+		<value>read-committed</value>
+		<description>Default transaction isolation level for identity
+			generation. </description>
+	</property>
+
+	<property>
+		<name>datanucleus.cache.level2</name>
+		<value>false</value>
+		<description>Use a level 2 cache. Turn this off if metadata is changed
+			independently of hive metastore server</description>
+	</property>
+
+	<property>
+		<name>datanucleus.cache.level2.type</name>
+		<value>SOFT</value>
+		<description>SOFT=soft reference based cache, WEAK=weak reference
+			based cache.</description>
+	</property>
+
+	<property>
+		<name>datanucleus.identifierFactory</name>
+		<value>datanucleus</value>
+		<description>Name of the identifier factory to use when generating
+			table/column names etc. 'datanucleus' is used for backward
+			compatibility</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.warehouse.dir</name>
+		<value>/user/hivesterix</value>
+		<description>location of default database for the warehouse
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.connect.retries</name>
+		<value>5</value>
+		<description>Number of retries while opening a connection to metastore
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.rawstore.impl</name>
+		<value>org.apache.hadoop.hive.metastore.ObjectStore</value>
+		<description>Name of the class that implements
+			org.apache.hadoop.hive.metastore.rawstore interface. This class is
+			used to store and retrieval of raw metadata objects such as table,
+			database</description>
+	</property>
+
+	<property>
+		<name>hive.default.fileformat</name>
+		<value>TextFile</value>
+		<description>Default file format for CREATE TABLE statement. Options
+			are TextFile and SequenceFile. Users can explicitly say CREATE TABLE
+			... STORED AS &lt;TEXTFILE|SEQUENCEFILE&gt; to override</description>
+	</property>
+
+	<property>
+		<name>hive.fileformat.check</name>
+		<value>true</value>
+		<description>Whether to check file format or not when loading data
+			files</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr</name>
+		<value>true</value>
+		<description>Whether to use map-side aggregation in Hive Group By
+			queries</description>
+	</property>
+
+	<property>
+		<name>hive.groupby.skewindata</name>
+		<value>false</value>
+		<description>Whether there is skew in data to optimize group by
+			queries</description>
+	</property>
+
+	<property>
+		<name>hive.groupby.mapaggr.checkinterval</name>
+		<value>100000</value>
+		<description>Number of rows after which the size check of the grouping
+			keys/aggregation classes is performed</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.local.mem</name>
+		<value>0</value>
+		<description>For local mode, memory of the mappers/reducers
+		</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr.hash.percentmemory</name>
+		<value>0.5</value>
+		<description>Portion of total memory to be used by map-side group
+			aggregation hash table</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr.hash.min.reduction</name>
+		<value>0.5</value>
+		<description>Hash aggregation will be turned off if the ratio between
+			hash
+			table size and input rows is bigger than this number. Set to 1 to make
+			sure
+			hash aggregation is never turned off.</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.cp</name>
+		<value>true</value>
+		<description>Whether to enable column pruner</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.ppd</name>
+		<value>true</value>
+		<description>Whether to enable predicate pushdown</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.pruner</name>
+		<value>true</value>
+		<description>Whether to enable the new partition pruner which depends
+			on predicate pushdown. If this is disabled,
+			the old partition pruner which is based on AST will be enabled.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.groupby</name>
+		<value>true</value>
+		<description>Whether to enable the bucketed group by from bucketed
+			partitions/tables.</description>
+	</property>
+
+	<property>
+		<name>hive.join.emit.interval</name>
+		<value>1000</value>
+		<description>How many rows in the right-most join operand Hive should
+			buffer before emitting the join result. </description>
+	</property>
+
+	<property>
+		<name>hive.join.cache.size</name>
+		<value>25000</value>
+		<description>How many rows in the joining tables (except the streaming
+			table) should be cached in memory. </description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.bucket.cache.size</name>
+		<value>100</value>
+		<description>How many values for each key in the map-joined table
+			should be cached in memory. </description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.maxsize</name>
+		<value>100000</value>
+		<description>Maximum # of rows of the small table that can be handled
+			by map-side join. If the size is reached and hive.task.progress is
+			set, a fatal error counter is set and the job will be killed.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.cache.numrows</name>
+		<value>25000</value>
+		<description>How many rows should be cached by jdbm for map join.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.skewjoin</name>
+		<value>false</value>
+		<description>Whether to enable skew join optimization. </description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.key</name>
+		<value>100000</value>
+		<description>Determine if we get a skew key in join. If we see more
+			than the specified number of rows with the same key in the join
+			operator, we treat the key as a skew join key. </description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.mapjoin.map.tasks</name>
+		<value>10000</value>
+		<description>Determine the number of map tasks used in the follow up
+			map join job for a skew join. It should be used together with
+			hive.skewjoin.mapjoin.min.split to perform fine grained control.</description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.mapjoin.min.split</name>
+		<value>33554432</value>
+		<description>Determine the maximum number of map tasks used in the
+			follow up map join job for a skew join, by specifying the minimum
+			split size. It should be used together with
+			hive.skewjoin.mapjoin.map.tasks to perform fine grained control.</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.mode</name>
+		<value>nonstrict</value>
+		<description>The mode in which the hive operations are being
+			performed. In strict mode, some risky queries are not allowed to run
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.script.maxerrsize</name>
+		<value>100000</value>
+		<description>Maximum number of bytes a script is allowed to emit to
+			standard error (per map-reduce task). This prevents runaway scripts
+			from filling logs partitions to capacity </description>
+	</property>
+
+	<property>
+		<name>hive.exec.script.allow.partial.consumption</name>
+		<value>false</value>
+		<description> When enabled, this option allows a user script to exit
+			successfully without consuming all the data from the standard input.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.script.operator.id.env.var</name>
+		<value>HIVE_SCRIPT_OPERATOR_ID</value>
+		<description> Name of the environment variable that holds the unique
+			script operator ID in the user's transform function (the custom
+			mapper/reducer that the user has specified in the query)
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.compress.output</name>
+		<value>false</value>
+		<description> This controls whether the final outputs of a query (to a
+			local/hdfs file or a hive table) is compressed. The compression codec
+			and other options are determined from hadoop config variables
+			mapred.output.compress* </description>
+	</property>
+
+	<property>
+		<name>hive.exec.compress.intermediate</name>
+		<value>false</value>
+		<description> This controls whether intermediate files produced by
+			hive between multiple map-reduce jobs are compressed. The compression
+			codec and other options are determined from hadoop config variables
+			mapred.output.compress* </description>
+	</property>
+
+	<property>
+		<name>hive.exec.parallel</name>
+		<value>false</value>
+		<description>Whether to execute jobs in parallel</description>
+	</property>
+
+	<property>
+		<name>hive.exec.parallel.thread.number</name>
+		<value>8</value>
+		<description>How many jobs at most can be executed in parallel
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.war.file</name>
+		<value>lib\hive-hwi-0.7.0.war</value>
+		<description>This sets the path to the HWI war file, relative to
+			${HIVE_HOME}. </description>
+	</property>
+
+	<property>
+		<name>hive.hwi.listen.host</name>
+		<value>0.0.0.0</value>
+		<description>This is the host address the Hive Web Interface will
+			listen on</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.listen.port</name>
+		<value>9999</value>
+		<description>This is the port the Hive Web Interface will listen on
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.pre.hooks</name>
+		<value></value>
+		<description>Pre Execute Hook for Tests</description>
+	</property>
+
+	<property>
+		<name>hive.merge.mapfiles</name>
+		<value>true</value>
+		<description>Merge small files at the end of a map-only job
+		</description>
+	</property>
+
+	<property>
+		<name>hive.merge.mapredfiles</name>
+		<value>false</value>
+		<description>Merge small files at the end of a map-reduce job
+		</description>
+	</property>
+
+	<property>
+		<name>hive.heartbeat.interval</name>
+		<value>1000</value>
+		<description>Send a heartbeat after this interval - used by mapjoin
+			and filter operators</description>
+	</property>
+
+	<property>
+		<name>hive.merge.size.per.task</name>
+		<value>256000000</value>
+		<description>Size of merged files at the end of the job</description>
+	</property>
+
+	<property>
+		<name>hive.merge.size.smallfiles.avgsize</name>
+		<value>16000000</value>
+		<description>When the average output file size of a job is less than
+			this number, Hive will start an additional map-reduce job to merge
+			the output files into bigger files. This is only done for map-only
+			jobs if hive.merge.mapfiles is true, and for map-reduce jobs if
+			hive.merge.mapredfiles is true.</description>
+	</property>
+
+	<property>
+		<name>hive.script.auto.progress</name>
+		<value>false</value>
+		<description>Whether Hive Transform/Map/Reduce Clause should
+			automatically send progress information to TaskTracker to avoid the
+			task getting killed because of inactivity. Hive sends progress
+			information when the script is outputting to stderr. This option
+			removes the need of periodically producing stderr messages, but users
+			should be cautious because this may prevent infinite loops in the
+			scripts from being killed by TaskTracker.</description>
+	</property>
+
+	<property>
+		<name>hive.script.serde</name>
+		<value>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</value>
+		<description>The default serde for transmitting input data to and
+			reading output data from the user scripts. </description>
+	</property>
+
+	<property>
+		<name>hive.script.recordreader</name>
+		<value>org.apache.hadoop.hive.ql.exec.TextRecordReader</value>
+		<description>The default record reader for reading data from the user
+			scripts. </description>
+	</property>
+
+	<property>
+		<name>hive.script.recordwriter</name>
+		<value>org.apache.hadoop.hive.ql.exec.TextRecordWriter</value>
+		<description>The default record writer for writing data to the user
+			scripts. </description>
+	</property>
+
+	<property>
+		<name>hive.input.format</name>
+		<value>org.apache.hadoop.hive.ql.io.HiveInputFormat</value>
+		<description>The default input format, if it is not specified, the
+			system assigns it. It is set to HiveInputFormat for hadoop versions
+			17, 18 and 19, whereas it is set to CombinedHiveInputFormat for
+			hadoop 20. The user can always overwrite it - if there is a bug in
+			CombinedHiveInputFormat, it can always be manually set to
+			HiveInputFormat. </description>
+	</property>
+
+	<property>
+		<name>hive.udtf.auto.progress</name>
+		<value>false</value>
+		<description>Whether Hive should automatically send progress
+			information to TaskTracker when using UDTF's to prevent the task
+			getting killed because of inactivity. Users should be cautious
+			because this may prevent TaskTracker from killing tasks with infinite
+			loops.  </description>
+	</property>
+
+	<property>
+		<name>hive.mapred.reduce.tasks.speculative.execution</name>
+		<value>true</value>
+		<description>Whether speculative execution for reducers should be
+			turned on. </description>
+	</property>
+
+	<property>
+		<name>hive.exec.counters.pull.interval</name>
+		<value>1000</value>
+		<description>The interval with which to poll the JobTracker for the
+			counters of the running job. The smaller it is, the more load there
+			will be on the jobtracker; the higher it is, the less granular the
+			updates will be.</description>
+	</property>
+
+	<property>
+		<name>hive.enforce.bucketing</name>
+		<value>false</value>
+		<description>Whether bucketing is enforced. If true, while inserting
+			into the table, bucketing is enforced. </description>
+	</property>
+
+	<property>
+		<name>hive.enforce.sorting</name>
+		<value>false</value>
+		<description>Whether sorting is enforced. If true, while inserting
+			into the table, sorting is enforced. </description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.connection.url.hook</name>
+		<value></value>
+		<description>Name of the hook to use for retrieving the JDO connection
+			URL. If empty, the value in javax.jdo.option.ConnectionURL is used.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.retry.attempts</name>
+		<value>1</value>
+		<description>The number of times to retry a metastore call if there
+			is a connection error.</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.retry.interval</name>
+		<value>1000</value>
+		<description>The number of milliseconds between metastore retry
+			attempts.</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.min.threads</name>
+		<value>200</value>
+		<description>Minimum number of worker threads in the Thrift server's
+			pool.</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.max.threads</name>
+		<value>100000</value>
+		<description>Maximum number of worker threads in the Thrift server's
+			pool.</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.tcp.keepalive</name>
+		<value>true</value>
+		<description>Whether to enable TCP keepalive for the metastore server.
+			Keepalive will prevent accumulation of half-open connections.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.reducededuplication</name>
+		<value>true</value>
+		<description>Remove extra map-reduce jobs if the data is already
+			clustered by the same key which needs to be used again. This should
+			always be set to true. Since it is a new feature, it has been made
+			configurable.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.dynamic.partition</name>
+		<value>false</value>
+		<description>Whether or not to allow dynamic partitions in DML/DDL.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.dynamic.partition.mode</name>
+		<value>strict</value>
+		<description>In strict mode, the user must specify at least one static
+			partition in case the user accidentally overwrites all partitions.
+		</description>
+	</property>
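To make strict mode concrete (table and column names here are hypothetical): the insert below is accepted because the leading partition column dt is static, while country is resolved dynamically; omitting the static dt would be rejected in strict mode.

    ./bin/hive -e "SET hive.exec.dynamic.partition=true;
                   INSERT OVERWRITE TABLE logs PARTITION (dt='2013-01-01', country)
                   SELECT msg, country FROM staged_logs;"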
+
+	<property>
+		<name>hive.exec.max.dynamic.partitions</name>
+		<value>1000</value>
+		<description>Maximum number of dynamic partitions allowed to be
+			created in total.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.max.dynamic.partitions.pernode</name>
+		<value>100</value>
+		<description>Maximum number of dynamic partitions allowed to be
+			created in each mapper/reducer node.</description>
+	</property>
+
+	<property>
+		<name>hive.default.partition.name</name>
+		<value>__HIVE_DEFAULT_PARTITION__</value>
+		<description>The default partition name in case the dynamic partition
+			column value is null, an empty string, or any other value that cannot
+			be escaped. This value must not contain any special character used in
+			HDFS URIs (e.g., ':', '%', '/', etc.). The user has to be aware that
+			the dynamic partition value should not contain this value, to avoid
+			confusion.</description>
+	</property>
+
+	<property>
+		<name>fs.har.impl</name>
+		<value>org.apache.hadoop.hive.shims.HiveHarFileSystem</value>
+		<description>The implementation for accessing Hadoop Archives. Note
+			that this is not applicable to Hadoop versions less than 0.20.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.archive.enabled</name>
+		<value>false</value>
+		<description>Whether archiving operations are permitted</description>
+	</property>
+
+	<property>
+		<name>hive.archive.har.parentdir.settable</name>
+		<value>false</value>
+		<description>In new Hadoop versions, the parent directory must be set
+			while creating a HAR. Because this functionality is hard to detect
+			with just version numbers, this conf var needs to be set manually.</description>
+	</property>
+
+	<!-- HBase Storage Handler Parameters -->
+
+	<property>
+		<name>hive.hbase.wal.enabled</name>
+		<value>true</value>
+		<description>Whether writes to HBase should be forced to the
+			write-ahead log. Disabling this improves HBase write performance at
+			the risk of lost writes in case of a crash.</description>
+	</property>
+
+</configuration>
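Any of the values in this file can also be overridden for a single invocation with the CLI's -hiveconf flag, without editing the XML; the property values below are chosen for illustration:

    ./bin/hive -hiveconf hive.enforce.bucketing=true \
               -hiveconf hive.exec.counters.pull.interval=5000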
diff --git a/hivesterix/hivesterix-dist/conf/hive-log4j.properties b/hivesterix/hivesterix-dist/conf/hive-log4j.properties
new file mode 100644
index 0000000..784a274
--- /dev/null
+++ b/hivesterix/hivesterix-dist/conf/hive-log4j.properties
@@ -0,0 +1,58 @@
+#------------------------------------------------------------------------------
+#
+#  The following properties set the logging levels and log appender.  The
+#  log4j.rootCategory variable defines the default log level and one or more
+#  appenders.  For the console, use 'S'.  For the daily rolling file, use 'R'.
+#  For an HTML formatted log, use 'H'.
+#
+#  To override the default (rootCategory) log level, define a property of the
+#  form (see below for available values):
+#
+#        log4j.logger.<logger name> = <level>
+#
+#    Available logger names:
+#      TODO
+#
+#    Possible Log Levels:
+#      FATAL, ERROR, WARN, INFO, DEBUG
+#
+#------------------------------------------------------------------------------
+log4j.rootCategory=INFO, S
+
+log4j.logger.com.dappit.Dapper.parser=ERROR
+log4j.logger.org.w3c.tidy=FATAL
+
+#------------------------------------------------------------------------------
+#
+#  The following properties configure the console (stdout) appender.
+#  See http://logging.apache.org/log4j/docs/api/index.html for details.
+#
+#------------------------------------------------------------------------------
+log4j.appender.S = org.apache.log4j.ConsoleAppender
+log4j.appender.S.layout = org.apache.log4j.PatternLayout
+log4j.appender.S.layout.ConversionPattern = %d{yyyy-MM-dd HH:mm:ss} %c{1} [%p] %m%n
+
+#------------------------------------------------------------------------------
+#
+#  The following properties configure the Daily Rolling File appender.
+#  See http://logging.apache.org/log4j/docs/api/index.html for details.
+#
+#------------------------------------------------------------------------------
+log4j.appender.R = org.apache.log4j.DailyRollingFileAppender
+log4j.appender.R.File = logs/bensApps.log
+log4j.appender.R.Append = true
+log4j.appender.R.DatePattern = '.'yyyy-MM-dd
+log4j.appender.R.layout = org.apache.log4j.PatternLayout
+log4j.appender.R.layout.ConversionPattern = %d{yyyy-MM-dd HH:mm:ss} %c{1} [%p] %m%n
+
+#------------------------------------------------------------------------------
+#
+#  The following properties configure the Rolling File appender in HTML.
+#  See http://logging.apache.org/log4j/docs/api/index.html for details.
+#
+#------------------------------------------------------------------------------
+log4j.appender.H = org.apache.log4j.RollingFileAppender
+log4j.appender.H.File = logs/bensApps.html
+log4j.appender.H.MaxFileSize = 100KB
+log4j.appender.H.Append = false
+log4j.appender.H.layout = org.apache.log4j.HTMLLayout
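A sketch of the override mechanism documented at the top of this file (the logger name is chosen for illustration): one could route output to the daily rolling file appender and raise one logger's verbosity by appending two lines, e.g. from the shell:

    cat >> conf/hive-log4j.properties <<'EOF'
    log4j.rootCategory=INFO, R
    log4j.logger.org.apache.hadoop.hive.ql=DEBUG
    EOF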
diff --git a/hivesterix/hivesterix-dist/conf/master b/hivesterix/hivesterix-dist/conf/master
new file mode 100644
index 0000000..2fbb50c
--- /dev/null
+++ b/hivesterix/hivesterix-dist/conf/master
@@ -0,0 +1 @@
+localhost
diff --git a/hivesterix/hivesterix-dist/conf/slaves b/hivesterix/hivesterix-dist/conf/slaves
new file mode 100644
index 0000000..2fbb50c
--- /dev/null
+++ b/hivesterix/hivesterix-dist/conf/slaves
@@ -0,0 +1 @@
+localhost
diff --git a/hivesterix/hivesterix-dist/pom.xml b/hivesterix/hivesterix-dist/pom.xml
new file mode 100644
index 0000000..20268df
--- /dev/null
+++ b/hivesterix/hivesterix-dist/pom.xml
@@ -0,0 +1,512 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<groupId>edu.uci.ics.hyracks</groupId>
+	<artifactId>hivesterix-dist</artifactId>
+	<version>0.2.3-SNAPSHOT</version>
+	<name>hivesterix-dist</name>
+	<dependencies>
+		<dependency>
+			<groupId>javax.servlet</groupId>
+			<artifactId>servlet-api</artifactId>
+			<version>2.5</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.8.1</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>args4j</groupId>
+			<artifactId>args4j</artifactId>
+			<version>2.0.12</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.json</groupId>
+			<artifactId>json</artifactId>
+			<version>20090211</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.eclipse.jetty</groupId>
+			<artifactId>jetty-server</artifactId>
+			<version>8.0.0.M1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.eclipse.jetty</groupId>
+			<artifactId>jetty-servlet</artifactId>
+			<version>8.0.0.M1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>jline</groupId>
+			<artifactId>jline</artifactId>
+			<version>0.9.94</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.datanucleus</groupId>
+			<artifactId>datanucleus-core</artifactId>
+			<version>2.0.3</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.datanucleus</groupId>
+			<artifactId>datanucleus-connectionpool</artifactId>
+			<version>2.0.3</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.datanucleus</groupId>
+			<artifactId>datanucleus-enhancer</artifactId>
+			<version>2.0.3</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.datanucleus</groupId>
+			<artifactId>datanucleus-rdbms</artifactId>
+			<version>2.0.3</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-dbcp</groupId>
+			<artifactId>commons-dbcp</artifactId>
+			<version>1.4</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-pool</groupId>
+			<artifactId>commons-pool</artifactId>
+			<version>1.5.4</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-collections</groupId>
+			<artifactId>commons-collections</artifactId>
+			<version>3.2.1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-lang</groupId>
+			<artifactId>commons-lang</artifactId>
+			<version>2.4</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>javax</groupId>
+			<artifactId>jdo2-api</artifactId>
+			<version>2.3-ec</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>com.facebook</groupId>
+			<artifactId>libfb303</artifactId>
+			<version>0.5.0</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.thrift</groupId>
+			<artifactId>libthrift</artifactId>
+			<version>0.5.0</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.commons</groupId>
+			<artifactId>cli</artifactId>
+			<version>1.2</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache</groupId>
+			<artifactId>log4j</artifactId>
+			<version>1.2.15</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.antlr</groupId>
+			<artifactId>antlr-runtime</artifactId>
+			<version>3.0.1</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-cli</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-common</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-exec</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-hwi</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-jdbc</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-metastore</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-service</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-shims</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-serde</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-api</artifactId>
+			<version>1.6.1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-cli</groupId>
+			<artifactId>commons-cli</artifactId>
+			<version>1.2</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-log4j12</artifactId>
+			<version>1.6.1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-logging</groupId>
+			<artifactId>commons-logging</artifactId>
+			<version>1.1.1</version>
+			<type>jar</type>
+			<classifier>api</classifier>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>com.google.guava</groupId>
+			<artifactId>guava</artifactId>
+			<version>r06</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.antlr</groupId>
+			<artifactId>stringtemplate</artifactId>
+			<version>3.2</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.derby</groupId>
+			<artifactId>derby</artifactId>
+			<version>10.8.1.2</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-core</artifactId>
+			<version>0.20.2</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hivesterix-translator</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hivesterix-optimizer</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hbase</groupId>
+			<artifactId>hbase</artifactId>
+			<version>0.90.3</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>algebricks-compiler</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-cc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-nc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+					<encoding>UTF-8</encoding>
+					<fork>true</fork>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-jar-plugin</artifactId>
+				<executions>
+					<execution>
+						<id>patch</id>
+						<goals>
+							<goal>jar</goal>
+						</goals>
+						<phase>package</phase>
+						<configuration>
+							<classifier>patch</classifier>
+							<finalName>a-hive</finalName>
+							<includes>
+								<include>**/org/apache/**</include>
+							</includes>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<groupId>org.codehaus.mojo</groupId>
+				<artifactId>appassembler-maven-plugin</artifactId>
+				<version>1.3</version>
+				<executions>
+					<execution>
+						<configuration>
+							<programs>
+								<program>
+									<mainClass>edu.uci.ics.asterix.hive.cli.CliDriver</mainClass>
+									<name>algebricks-hivesterix-cmd</name>
+								</program>
+							</programs>
+							<repositoryLayout>flat</repositoryLayout>
+							<repositoryName>lib</repositoryName>
+						</configuration>
+						<phase>package</phase>
+						<goals>
+							<goal>assemble</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<artifactId>maven-assembly-plugin</artifactId>
+				<version>2.2-beta-5</version>
+				<executions>
+					<execution>
+						<configuration>
+							<descriptors>
+								<descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+							</descriptors>
+						</configuration>
+						<phase>package</phase>
+						<goals>
+							<goal>attached</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.13</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<argLine>-enableassertions -Xmx2047m -Dfile.encoding=UTF-8
+						-Djava.util.logging.config.file=src/test/resources/logging.properties</argLine>
+					<includes>
+						<include>**/test/optimizer/*TestSuite.java</include>
+						<include>**/test/optimizer/*Test.java</include>
+						<include>**/test/runtimefunction/*TestSuite.java</include>
+						<include>**/test/runtimefunction/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-resources-plugin</artifactId>
+				<version>2.5</version>
+				<executions>
+					<execution>
+						<id>copy-scripts</id>
+						<!-- here the phase you need -->
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>target/appassembler/bin</outputDirectory>
+							<resources>
+								<resource>
+									<directory>src/main/resources/scripts</directory>
+								</resource>
+							</resources>
+						</configuration>
+					</execution>
+					<execution>
+						<id>copy-conf</id>
+						<!-- here the phase you need -->
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>target/appassembler/conf</outputDirectory>
+							<resources>
+								<resource>
+									<directory>src/main/resources/conf</directory>
+								</resource>
+							</resources>
+						</configuration>
+					</execution>
+					<execution>
+						<id>copy-jar</id>
+						<!-- here the phase you need -->
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>target/appassembler/lib</outputDirectory>
+							<resources>
+								<resource>
+									<directory>target</directory>
+									<includes>
+										<include>*patch.jar</include>
+									</includes>
+								</resource>
+							</resources>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
+				<configuration>
+					<filesets>
+						<fileset>
+							<directory>.</directory>
+							<includes>
+								<include>metastore*</include>
+								<include>hadoop*</include>
+								<include>edu*</include>
+								<include>tmp*</include>
+								<include>build*</include>
+								<include>target*</include>
+								<include>log*</include>
+								<include>derby.log</include>
+								<include>ClusterController*</include>
+							</includes>
+						</fileset>
+					</filesets>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+	<repositories>
+		<repository>
+			<releases>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>warn</checksumPolicy>
+			</releases>
+			<snapshots>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>fail</checksumPolicy>
+			</snapshots>
+			<id>third-party</id>
+			<url>http://obelix.ics.uci.edu/nexus/content/repositories/third-party</url>
+		</repository>
+		<repository>
+			<releases>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>warn</checksumPolicy>
+			</releases>
+			<snapshots>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>fail</checksumPolicy>
+			</snapshots>
+			<id>hyracks-public-release</id>
+			<url>http://obelix.ics.uci.edu/nexus/content/repositories/hyracks-public-releases</url>
+		</repository>
+	</repositories>
+</project>

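Assuming the snapshot and third-party dependencies resolve from the repositories listed above (a plain Maven invocation, nothing hivesterix-specific), the jar plugin's "patch" execution produces a-hive-patch.jar in target/, which the copy-jar execution then stages into target/appassembler/lib:

    cd hivesterix/hivesterix-dist
    mvn package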
diff --git a/hivesterix/hivesterix-dist/resource/asterix/destroy.sh b/hivesterix/hivesterix-dist/resource/asterix/destroy.sh
new file mode 100644
index 0000000..9ece8d6
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix/destroy.sh
@@ -0,0 +1,2 @@
+connect to "localhost:3099";
+destroy application hivex;
diff --git a/hivesterix/hivesterix-dist/resource/asterix/hivedeploy.hcli b/hivesterix/hivesterix-dist/resource/asterix/hivedeploy.hcli
new file mode 100644
index 0000000..f588ac6
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix/hivedeploy.hcli
@@ -0,0 +1,2 @@
+connect to "128.195.14.4:3099";
+create application hivesterix "/home/yingyib/hivesterix/target/algebricks-hivesterix-0.0.1-SNAPSHOT-binary-assembly.zip";
diff --git a/hivesterix/hivesterix-dist/resource/asterix/hivedestroy.hcli b/hivesterix/hivesterix-dist/resource/asterix/hivedestroy.hcli
new file mode 100644
index 0000000..c53fa81
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix/hivedestroy.hcli
@@ -0,0 +1,2 @@
+connect to "localhost:3099";
+destroy application hivesterix;
diff --git a/hivesterix/hivesterix-dist/resource/asterix/startall.sh b/hivesterix/hivesterix-dist/resource/asterix/startall.sh
new file mode 100755
index 0000000..e28ae27
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix/startall.sh
@@ -0,0 +1,16 @@
+ssh asterix-master './hivesterix/target/appassembler/asterix/startcc.sh'&
+sleep 20
+ssh asterix-001 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-002 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-003 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-004 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-005 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-006 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-007 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-008 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-009 './hivesterix/target/appassembler/asterix/startnc.sh'&
+ssh asterix-010 './hivesterix/target/appassembler/asterix/startnc.sh'&
+
+sleep 30
+export HYRACKS_HOME=/home/yingyib/hyracks_asterix_stabilization
+$HYRACKS_HOME/hyracks-cli/target/appassembler/bin/hyrackscli < ~/hivesterix/target/appassembler/asterix/hivedeploy.hcli
diff --git a/hivesterix/hivesterix-dist/resource/asterix/startcc.sh b/hivesterix/hivesterix-dist/resource/asterix/startcc.sh
new file mode 100755
index 0000000..f313a9f
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix/startcc.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+LOGSDIR=/mnt/data/sda/space/yingyi/hyracks/logs
+HYRACKS_HOME=/home/yingyib/hyracks_asterix_stabilization
+
+export JAVA_OPTS="-Xmx2g  -Djava.rmi.server.hostname=128.195.14.4"
+
+cd $LOGSDIR
+echo $HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address 128.195.14.4 -cluster-net-ip-address 10.1.0.1 -client-net-port 3099 -cluster-net-port 1099 -max-heartbeat-lapse-periods 999999 &> $LOGSDIR/cc-asterix.log&
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address 128.195.14.4 -cluster-net-ip-address 10.1.0.1 -client-net-port 3099 -cluster-net-port 1099 -max-heartbeat-lapse-periods 999999 &> $LOGSDIR/cc-asterix.log&
diff --git a/hivesterix/hivesterix-dist/resource/asterix/startnc.sh b/hivesterix/hivesterix-dist/resource/asterix/startnc.sh
new file mode 100755
index 0000000..6cbd5e9
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix/startnc.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+export JAVA_HOME=/usr/local/java/vms/java
+
+LOGSDIR=/mnt/data/sda/space/yingyi/hyracks/logs
+HYRACKS_HOME=/home/yingyib/hyracks_asterix_stabilization
+
+IPADDR=`/sbin/ifconfig eth0 | grep "inet addr" | awk '{print $2}' | cut -f 2 -d ':'`
+NODEID=`ypcat hosts | grep asterix | grep "$IPADDR " | awk '{print $2}'`
+
+rm -rf /mnt/data/sda/space/yingyi/tmp/*
+rm -rf /mnt/data/sdb/space/yingyi/tmp/*
+rm -rf /mnt/data/sdc/space/yingyi/tmp/*
+rm -rf /mnt/data/sdd/space/yingyi/tmp/*
+
+
+export JAVA_OPTS="-Xmx10G"
+
+cd $LOGSDIR
+echo $HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host 128.195.14.4 -cc-port 3099 -data-ip-address $IPADDR -node-id $NODEID
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host 10.1.0.1 -cc-port 1099 -cluster-net-ip-address $IPADDR -data-ip-address $IPADDR -node-id $NODEID -iodevices "/mnt/data/sda/space/yingyi/tmp/,/mnt/data/sdb/space/yingyi/tmp/,/mnt/data/sdc/space/yingyi/tmp/,/mnt/data/sdd/space/yingyi/tmp/" -frame-size 32768&> $LOGSDIR/$NODEID.log &
diff --git a/hivesterix/hivesterix-dist/resource/asterix/stopall.sh b/hivesterix/hivesterix-dist/resource/asterix/stopall.sh
new file mode 100755
index 0000000..7cd5a5a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix/stopall.sh
@@ -0,0 +1,11 @@
+ssh asterix-master './hivesterix/target/appassembler/asterix/stopcc.sh'&
+ssh asterix-001 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-002 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-003 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-004 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-005 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-006 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-007 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-008 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-009 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
+ssh asterix-010 '/home/yingyib/hivesterix/target/appassembler/asterix/stopnc.sh'&
diff --git a/hivesterix/hivesterix-dist/resource/asterix/stopcc.sh b/hivesterix/hivesterix-dist/resource/asterix/stopcc.sh
new file mode 100755
index 0000000..51a1066
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix/stopcc.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+PID=`/usr/local/java/vms/java/bin/jps | grep CCDriver | awk '{print $1}'`
+
+echo $PID
+kill -9 $PID
diff --git a/hivesterix/hivesterix-dist/resource/asterix/stopnc.sh b/hivesterix/hivesterix-dist/resource/asterix/stopnc.sh
new file mode 100755
index 0000000..77cecfc
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix/stopnc.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+#PID=`/usr/local/java/vms/java/bin/jps | grep NCDriver | awk '{print $1}'`
+
+PID=`ps -ef|grep yingyib|grep java|grep hyracks|awk '{print $2}'`
+
+echo $PID
+kill -9 $PID
+
+#PID=`ps -ef|grep yingyib|grep java|grep datanode|awk '{print $2}'`
+
+#echo $PID
+#kill -9 $PID
+
+
+#PID=`ps -ef|grep yingyib|grep java|grep tasktracker|awk '{print $2}'`
+
+#echo $PID
+#kill -9 $PID
+
+rm -rf /data/yingyi/tmp/*
diff --git a/hivesterix/hivesterix-dist/resource/asterix_dbg/destroy.sh b/hivesterix/hivesterix-dist/resource/asterix_dbg/destroy.sh
new file mode 100644
index 0000000..9ece8d6
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix_dbg/destroy.sh
@@ -0,0 +1,2 @@
+connect to "localhost:3099";
+destroy application hivex;
diff --git a/hivesterix/hivesterix-dist/resource/asterix_dbg/hivedeploy.hcli b/hivesterix/hivesterix-dist/resource/asterix_dbg/hivedeploy.hcli
new file mode 100644
index 0000000..6012493
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix_dbg/hivedeploy.hcli
@@ -0,0 +1,2 @@
+connect to "localhost:3099";
+create application hivesterix "/home/yingyib/hivesterix/target/algebricks-hivesterix-0.0.1-SNAPSHOT-binary-assembly.zip";
diff --git a/hivesterix/hivesterix-dist/resource/asterix_dbg/hivedestroy.hcli b/hivesterix/hivesterix-dist/resource/asterix_dbg/hivedestroy.hcli
new file mode 100644
index 0000000..c53fa81
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix_dbg/hivedestroy.hcli
@@ -0,0 +1,2 @@
+connect to "localhost:3099";
+destroy application hivesterix;
diff --git a/hivesterix/hivesterix-dist/resource/asterix_dbg/startall.sh b/hivesterix/hivesterix-dist/resource/asterix_dbg/startall.sh
new file mode 100755
index 0000000..24e57c4
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix_dbg/startall.sh
@@ -0,0 +1,17 @@
+HYRACKS_HOME=/home/yingyib/hyracks-0.1.5
+
+ssh asterix-master ./hivesterix/target/appassembler/asterix_dbg/startcc.sh
+sleep 20
+ssh asterix-001 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-002 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-003 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-004 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-005 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-006 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-007 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-008 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-009 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+ssh asterix-010 ./hivesterix/target/appassembler/asterix_dbg/startnc.sh
+
+sleep 10
+$HYRACKS_HOME/hyracks-cli/target/appassembler/bin/hyrackscli < ~/hivesterix/target/appassembler/asterix_dbg/hivedeploy.hcli
diff --git a/hivesterix/hivesterix-dist/resource/asterix_dbg/startcc.sh b/hivesterix/hivesterix-dist/resource/asterix_dbg/startcc.sh
new file mode 100755
index 0000000..5a0043e
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix_dbg/startcc.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+LOGSDIR=/mnt/data/sda/space/yingyi/hyracks/logs
+HYRACKS_HOME=/home/yingyib/hyracks-0.1.5
+
+export JAVA_OPTS="-Djava.rmi.server.hostname=128.195.14.4 -Xdebug -Xrunjdwp:transport=dt_socket,address=7001,server=y,suspend=n"
+
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -port 3099  &> $LOGSDIR/cc-asterix.log&
diff --git a/hivesterix/hivesterix-dist/resource/asterix_dbg/startnc.sh b/hivesterix/hivesterix-dist/resource/asterix_dbg/startnc.sh
new file mode 100755
index 0000000..5a6024b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix_dbg/startnc.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+export JAVA_HOME=/usr/local/java/vms/java
+
+LOGSDIR=/mnt/data/sda/space/yingyi/hyracks/logs
+HYRACKS_HOME=/home/yingyib/hyracks-0.1.5
+
+IPADDR=`/sbin/ifconfig eth0 | grep "inet addr" | awk '{print $2}' | cut -f 2 -d ':'`
+NODEID=`ypcat hosts | grep asterix | grep "$IPADDR " | awk '{print $2}'`
+
+export JAVA_OPTS="-Xmx10G -agentpath:/home/yingyib/yjp-9.5.6/bin/linux-x86-64/libyjpagent.so=listen=28001"
+
+echo $HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host 128.195.14.4 -cc-port 3099 -data-ip-address $IPADDR -node-id $NODEID
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host 128.195.14.4 -cc-port 3099  -data-ip-address $IPADDR -node-id $NODEID -iodevices "/mnt/data/sda/space/yingyi/tmp/,/mnt/data/sdb/space/yingyi/tmp/,/mnt/data/sdc/space/yingyi/tmp/,/mnt/data/sdd/space/yingyi/tmp/" &> $LOGSDIR/$NODEID.log &
diff --git a/hivesterix/hivesterix-dist/resource/asterix_dbg/stopall.sh b/hivesterix/hivesterix-dist/resource/asterix_dbg/stopall.sh
new file mode 100755
index 0000000..e2295f1
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix_dbg/stopall.sh
@@ -0,0 +1,11 @@
+ssh asterix-master ./hivesterix/target/appassembler/asterix_dbg/stopcc.sh
+ssh asterix-001 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-002 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-003 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-004 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-005 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-006 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-007 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-008 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-009 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
+ssh asterix-010 ./hivesterix/target/appassembler/asterix_dbg/stopnc.sh
diff --git a/hivesterix/hivesterix-dist/resource/asterix_dbg/stopcc.sh b/hivesterix/hivesterix-dist/resource/asterix_dbg/stopcc.sh
new file mode 100755
index 0000000..51a1066
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix_dbg/stopcc.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+PID=`/usr/local/java/vms/java/bin/jps | grep CCDriver | awk '{print $1}'`
+
+echo $PID
+kill -9 $PID
diff --git a/hivesterix/hivesterix-dist/resource/asterix_dbg/stopnc.sh b/hivesterix/hivesterix-dist/resource/asterix_dbg/stopnc.sh
new file mode 100755
index 0000000..6bbbb3b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/asterix_dbg/stopnc.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+#PID=`/usr/local/java/vms/java/bin/jps | grep NCDriver | awk '{print $1}'`
+
+PID=`ps -ef|grep yingyib|grep java|grep hyracks|awk '{print $2}'`
+
+echo $PID
+kill -9 $PID
+
+PID=`ps -ef|grep yingyib|grep java|grep datanode|awk '{print $2}'`
+
+echo $PID
+kill -9 $PID
+
+
+PID=`ps -ef|grep yingyib|grep java|grep tasktracker|awk '{print $2}'`
+
+echo $PID
+kill -9 $PID
+
+rm -rf /mnt/data/sda/space/yingyi/tmp/*
+rm -rf /mnt/data/sdb/space/yingyi/tmp/*
+rm -rf /mnt/data/sdc/space/yingyi/tmp/*
+rm -rf /mnt/data/sdd/space/yingyi/tmp/*
diff --git a/hivesterix/hivesterix-dist/resource/bin/ext/cli.sh b/hivesterix/hivesterix-dist/resource/bin/ext/cli.sh
new file mode 100644
index 0000000..914aae3
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/bin/ext/cli.sh
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=cli
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+cli () {
+  CLASS=org.apache.hadoop.hive.cli.CliDriver
+  execHiveCmd $CLASS "$@"
+}
+
+cli_help () {
+  CLASS=org.apache.hadoop.hive.cli.CliDriver
+  execHiveCmd $CLASS "--help"
+} 
+
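Since bin/hive (added later in this diff) defaults SERVICE to cli, the two invocations below are equivalent:

    ./bin/hive
    ./bin/hive --service cli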
diff --git a/hivesterix/hivesterix-dist/resource/bin/ext/help.sh b/hivesterix/hivesterix-dist/resource/bin/ext/help.sh
new file mode 100644
index 0000000..432859a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/bin/ext/help.sh
@@ -0,0 +1,36 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=help
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+help() {
+  echo "Usage ./hive <parameters> --service serviceName <service parameters>"
+  echo "Service List: $SERVICE_LIST"
+  echo "Parameters parsed:"
+  echo "  --auxpath : Auxiliary jars "
+  echo "  --config : Hive configuration directory"
+  echo "  --service : Starts specific service/component. cli is default"
+  echo "Parameters used:"
+  echo "  HADOOP_HOME : Hadoop install directory"
+  echo "  HIVE_OPT : Hive options"
+  echo "For help on a particular service:"
+  echo "  ./hive --service serviceName --help"
+}
+
+help_help(){
+  help
+}
+
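The help service is reached either explicitly or via the --help flag handled in bin/hive:

    ./bin/hive --help
    ./bin/hive --service help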
diff --git a/hivesterix/hivesterix-dist/resource/bin/ext/hiveserver.sh b/hivesterix/hivesterix-dist/resource/bin/ext/hiveserver.sh
new file mode 100644
index 0000000..b5edce4
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/bin/ext/hiveserver.sh
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=hiveserver
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+hiveserver() {
+  echo "Starting Hive Thrift Server"
+  CLASS=org.apache.hadoop.hive.service.HiveServer
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+  JAR=${HIVE_LIB}/hive-service-*.jar
+
+  # hadoop 20 or newer - skip the aux_jars option and hiveconf
+  exec $HADOOP jar $JAR $CLASS $HIVE_PORT "$@"
+}
+
+hiveserver_help() {
+  echo "usage HIVE_PORT=xxxx ./hive --service hiveserver" 
+  echo "  HIVE_PORT : Specify the server port"
+}
+
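Per the hiveserver_help text above, the Thrift server is started with the port passed through the environment (the port value is illustrative):

    HIVE_PORT=10000 ./bin/hive --service hiveserver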
diff --git a/hivesterix/hivesterix-dist/resource/bin/ext/hwi.sh b/hivesterix/hivesterix-dist/resource/bin/ext/hwi.sh
new file mode 100644
index 0000000..f9cd8ec
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/bin/ext/hwi.sh
@@ -0,0 +1,50 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=hwi
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+hwi() {
+
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+
+  CLASS=org.apache.hadoop.hive.hwi.HWIServer
+  # The ls hack forces the * to be expanded which is required because 
+  # System.getenv doesn't do globbing
+  export HWI_JAR_FILE=$(ls ${HIVE_LIB}/hive-hwi-*.jar)
+  export HWI_WAR_FILE=$(ls ${HIVE_LIB}/hive-hwi-*.war)
+
+  #hwi requires ant jars
+  if [ "$ANT_LIB" = "" ] ; then
+    ANT_LIB=/opt/ant/lib
+  fi
+  for f in ${ANT_LIB}/*.jar; do
+    if [[ ! -f $f ]]; then
+      continue;
+    fi
+    HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:$f
+  done
+
+  export HADOOP_CLASSPATH
+  
+  # hadoop 20 or newer - skip the aux_jars option and hiveconf
+  exec $HADOOP jar ${HWI_JAR_FILE} $CLASS $HIVE_OPTS "$@"
+}
+
+hwi_help(){
+  echo "Usage ANT_LIB=XXXX hive --service hwi"	
+}
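Per hwi_help, ANT_LIB only needs to be set when the ant jars are not in the default /opt/ant/lib location assumed above (path below is illustrative):

    ANT_LIB=/usr/share/ant/lib ./bin/hive --service hwi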
diff --git a/hivesterix/hivesterix-dist/resource/bin/ext/jar.sh b/hivesterix/hivesterix-dist/resource/bin/ext/jar.sh
new file mode 100644
index 0000000..b52f9a7
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/bin/ext/jar.sh
@@ -0,0 +1,47 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=jar
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+jar () {
+  RUNJAR=$1
+  shift
+
+  RUNCLASS=$1
+  shift
+
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+
+  if [ -z "$RUNJAR" ] ; then
+    echo "RUNJAR not specified"
+    exit 3
+  fi
+
+  if [ -z "$RUNCLASS" ] ; then
+    echo "RUNCLASS not specified"
+    exit 3
+  fi
+
+  # hadoop 20 or newer - skip the aux_jars option and hiveconf
+  exec $HADOOP jar $RUNJAR $RUNCLASS $HIVE_OPTS "$@"
+}
+
+jar_help () {
+  echo "Used for applications that require Hadoop and Hive classpath and environment."
+  echo "./hive --service jar <yourjar> <yourclass> HIVE_OPTS <your_args>"
+}
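A hypothetical invocation matching jar_help (the jar, class, and argument names are placeholders, not part of this change):

    ./bin/hive --service jar myapp.jar com.example.Main --my-arg value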
diff --git a/hivesterix/hivesterix-dist/resource/bin/ext/lineage.sh b/hivesterix/hivesterix-dist/resource/bin/ext/lineage.sh
new file mode 100644
index 0000000..993bc8d
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/bin/ext/lineage.sh
@@ -0,0 +1,38 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=lineage
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+lineage () {
+  CLASS=org.apache.hadoop.hive.ql.tools.LineageInfo
+
+  # cli specific code
+  if [ ! -f ${HIVE_LIB}/hive-exec-*.jar ]; then
+    echo "Missing Hive exec Jar"
+    exit 3;
+  fi
+
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+
+  exec $HADOOP jar ${HIVE_LIB}/hive-exec-*.jar $CLASS  "$@"
+}
+
+lineage_help () {
+  echo "usage ./hive 'hql' "
+} 
+
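A sketch of the lineage service, which hands the quoted HQL string to LineageInfo (the query text is illustrative):

    ./bin/hive --service lineage 'INSERT OVERWRITE TABLE t SELECT a.x FROM a JOIN b ON (a.k = b.k)'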
diff --git a/hivesterix/hivesterix-dist/resource/bin/ext/metastore.sh b/hivesterix/hivesterix-dist/resource/bin/ext/metastore.sh
new file mode 100644
index 0000000..db15f6e
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/bin/ext/metastore.sh
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=metastore
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+metastore() {
+  echo "Starting Hive Metastore Server"
+  CLASS=org.apache.hadoop.hive.metastore.HiveMetaStore
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+  JAR=${HIVE_LIB}/hive-service-*.jar
+
+  # hadoop 20 or newer - skip the aux_jars option and hiveconf
+  exec $HADOOP jar $JAR $CLASS $METASTORE_PORT "$@"
+}
+
+metastore_help() {
+  echo "usage METASTORE_PORT=xxxx ./hive --service metastore"
+  echo "  METASTORE_PORT : Specify the metastore server port"
+}
+
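Per metastore_help, the standalone metastore is started with its port passed through the environment (the port value is illustrative):

    METASTORE_PORT=9083 ./bin/hive --service metastore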
diff --git a/hivesterix/hivesterix-dist/resource/bin/ext/rcfilecat.sh b/hivesterix/hivesterix-dist/resource/bin/ext/rcfilecat.sh
new file mode 100644
index 0000000..3a9264b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/bin/ext/rcfilecat.sh
@@ -0,0 +1,27 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=rcfilecat
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+rcfilecat () {
+  CLASS=org.apache.hadoop.hive.cli.RCFileCat
+  HIVE_OPTS=''
+  execHiveCmd $CLASS "$@"
+}
+
+rcfilecat_help () {
+  echo "usage ./hive rcfilecat [--start='startoffset'] [--length='len'] "
+} 
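A hypothetical invocation (the HDFS path is a placeholder), using the --rcfilecat shorthand that bin/hive maps to this service:

    ./bin/hive --rcfilecat --start=0 --length=1024 /user/hive/warehouse/t/000000_0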
diff --git a/hivesterix/hivesterix-dist/resource/bin/ext/util/execHiveCmd.sh b/hivesterix/hivesterix-dist/resource/bin/ext/util/execHiveCmd.sh
new file mode 100644
index 0000000..167cc40
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/bin/ext/util/execHiveCmd.sh
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+execHiveCmd () {
+  CLASS=$1;
+  shift;
+
+  # cli specific code
+  if [ ! -f ${HIVE_LIB}/hive-cli-*.jar ]; then
+    echo "Missing Hive CLI Jar"
+    exit 3;
+  fi
+
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+
+  # hadoop 20 or newer - skip the aux_jars option. picked up from hiveconf
+  exec $HADOOP jar ${HIVE_LIB}/hive-cli-*.jar $CLASS $HIVE_OPTS "$@"
+}
diff --git a/hivesterix/hivesterix-dist/resource/bin/hive b/hivesterix/hivesterix-dist/resource/bin/hive
new file mode 100755
index 0000000..8a83bde
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/bin/hive
@@ -0,0 +1,213 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+cygwin=false
+case "`uname`" in
+   CYGWIN*) cygwin=true;;
+esac
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hive-config.sh
+
+SERVICE=""
+HELP=""
+while [ $# -gt 0 ]; do
+  case "$1" in
+    --service)
+      shift
+      SERVICE=$1
+      shift
+      ;;
+    --rcfilecat)
+      SERVICE=rcfilecat
+      shift
+      ;;
+    --help)
+      HELP=_help
+      shift
+      ;;
+    *)
+      break
+      ;;
+  esac
+done
+
+if [ "$SERVICE" = "" ] ; then
+  if [ "$HELP" = "_help" ] ; then
+    SERVICE="help"
+  else
+    SERVICE="cli"
+  fi
+fi
+
+if [ -f "${HIVE_CONF_DIR}/hive-env.sh" ]; then
+  . "${HIVE_CONF_DIR}/hive-env.sh"
+fi
+
+CLASSPATH="${HIVE_CONF_DIR}"
+
+HIVE_LIB=${HIVE_HOME}/lib
+
+# needed for execution
+if [ ! -f ${HIVE_LIB}/hive-exec-*.jar ]; then
+  echo "Missing Hive Execution Jar: ${HIVE_LIB}/hive-exec-*.jar"
+  exit 1;
+fi
+
+if [ ! -f ${HIVE_LIB}/hive-metastore-*.jar ]; then
+  echo "Missing Hive MetaStore Jar"
+  exit 2;
+fi
+
+# cli specific code
+if [ ! -f ${HIVE_LIB}/hive-cli-*.jar ]; then
+  echo "Missing Hive CLI Jar"
+  exit 3;
+fi
+
+CLASSPATH=${CLASSPATH}:${HIVE_LIB}/algebricks-hivesterix-0.0.1-SNAPSHOT.jar
+
+for f in ${HIVE_LIB}/*.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+
+# add the auxiliary jars such as serdes
+if [ -d "${HIVE_AUX_JARS_PATH}" ]; then
+  for f in ${HIVE_AUX_JARS_PATH}/*.jar; do
+    if [[ ! -f $f ]]; then
+        continue;
+    fi
+    if $cygwin; then
+	f=`cygpath -w "$f"`
+    fi
+    AUX_CLASSPATH=${AUX_CLASSPATH}:$f
+    if [ "${AUX_PARAM}" == "" ]; then
+        AUX_PARAM=file://$f
+    else
+        AUX_PARAM=${AUX_PARAM},file://$f;
+    fi
+  done
+elif [ "${HIVE_AUX_JARS_PATH}" != "" ]; then 
+  if $cygwin; then
+      HIVE_AUX_JARS_PATH=`echo $HIVE_AUX_JARS_PATH | sed 's/,/:/g'`
+      HIVE_AUX_JARS_PATH=`cygpath -p -w "$HIVE_AUX_JARS_PATH"`
+      HIVE_AUX_JARS_PATH=`echo $HIVE_AUX_JARS_PATH | sed 's/;/,/g'`
+  fi
+  AUX_CLASSPATH=${HIVE_AUX_JARS_PATH}
+  AUX_PARAM=file://${HIVE_AUX_JARS_PATH}
+  AUX_PARAM=`echo $AUX_PARAM | sed 's/,/,file:\/\//g'`
+fi
+
+# adding jars from auxlib directory
+for f in ${HIVE_HOME}/auxlib/*.jar; do
+  if [[ ! -f $f ]]; then
+      continue;
+  fi
+  if $cygwin; then
+      f=`cygpath -w "$f"`
+  fi
+  AUX_CLASSPATH=${AUX_CLASSPATH}:$f
+  if [ "${AUX_PARAM}" == "" ]; then
+    AUX_PARAM=file://$f
+  else
+    AUX_PARAM=${AUX_PARAM},file://$f;
+  fi
+done
+if $cygwin; then
+    CLASSPATH=`cygpath -p -w "$CLASSPATH"`
+    CLASSPATH=${CLASSPATH};${AUX_CLASSPATH}
+else
+    CLASSPATH=${CLASSPATH}:${AUX_CLASSPATH}
+fi
+
+# pass classpath to hadoop
+export HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${CLASSPATH}"
+
+# check for hadoop in the path
+HADOOP_IN_PATH=`which hadoop 2>/dev/null`
+if [ -f ${HADOOP_IN_PATH} ]; then
+  HADOOP_DIR=`dirname "$HADOOP_IN_PATH"`/..
+fi
+# HADOOP_HOME env variable overrides hadoop in the path
+HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DIR}
+if [ "$HADOOP_HOME" == "" ]; then
+  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  exit 4;
+fi
+
+HADOOP=$HADOOP_HOME/bin/hadoop
+if [ ! -f ${HADOOP} ]; then
+  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  exit 4;
+fi
+
+# Make sure we're using a compatible version of Hadoop
+hadoop_version=$($HADOOP version | awk '{if (NR == 1) {print $2;}}');
+
+# Save the regex to a var to work around quoting incompatibilities
+# between Bash 3.1 and 3.2
+hadoop_version_re="^([[:digit:]]+)\.([[:digit:]]+)(\.([[:digit:]]+))?.*$"
+
+if [[ "$hadoop_version" =~ $hadoop_version_re ]]; then
+    hadoop_major_ver=${BASH_REMATCH[1]}
+    hadoop_minor_ver=${BASH_REMATCH[2]}
+    hadoop_patch_ver=${BASH_REMATCH[4]}
+else
+    echo "Unable to determine Hadoop version information."
+    echo "'hadoop version' returned:"
+    echo `$HADOOP version`
+    exit 5
+fi
+
+if [ $hadoop_minor_ver -ne 20 -o $hadoop_patch_ver -eq 0 ]; then
+    echo "Hive requires Hadoop 0.20.x (x >= 1)."
+    echo "'hadoop version' returned:"
+    echo `$HADOOP version`
+    exit 6
+fi
+
+if [ "${AUX_PARAM}" != "" ]; then
+  HIVE_OPTS="$HIVE_OPTS -hiveconf hive.aux.jars.path=${AUX_PARAM}"
+  AUX_JARS_CMD_LINE="-libjars ${AUX_PARAM}"
+fi
+
+SERVICE_LIST=""
+
+for i in "$bin"/ext/*.sh ; do
+  . $i
+done
+
+for i in "$bin"/ext/util/*.sh ; do
+  . $i
+done
+
+TORUN=""
+for j in $SERVICE_LIST ; do
+  if [ "$j" = "$SERVICE" ] ; then
+    TORUN=${j}$HELP
+  fi
+done
+
+if [ "$TORUN" = "" ] ; then
+  echo "Service $SERVICE not found"
+  echo "Available Services: $SERVICE_LIST"
+  exit 7
+else
+  $TORUN "$@"
+fi
diff --git a/hivesterix/hivesterix-dist/resource/bin/hive-config.sh b/hivesterix/hivesterix-dist/resource/bin/hive-config.sh
new file mode 100755
index 0000000..2524bbc
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/bin/hive-config.sh
@@ -0,0 +1,68 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# processes --config option from command line
+#
+
+this="$0"
+while [ -h "$this" ]; do
+  ls=`ls -ld "$this"`
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '.*/.*' > /dev/null; then
+    this="$link"
+  else
+    this=`dirname "$this"`/"$link"
+  fi
+done
+
+# convert relative path to absolute path
+bin=`dirname "$this"`
+script=`basename "$this"`
+bin=`cd "$bin"; pwd`
+this="$bin/$script"
+
+# the root of the Hive installation
+export HIVE_HOME=`dirname "$bin"`
+
+# check to see if the conf dir is given as an optional argument
+while [ $# -gt 0 ]; do    # Until you run out of parameters . . .
+  case "$1" in
+    --config)
+        shift
+        confdir=$1
+        shift
+        HIVE_CONF_DIR=$confdir
+        ;;
+    --auxpath)
+        shift
+        HIVE_AUX_JARS_PATH=$1
+        shift
+        ;;
+    *)
+        break;
+        ;;
+  esac
+done
+
+
+# Allow alternate conf dir location.
+HIVE_CONF_DIR="${HIVE_CONF_DIR:-$HIVE_HOME/conf}"
+
+export HIVE_CONF_DIR=$HIVE_CONF_DIR
+export HIVE_AUX_JARS_PATH=$HIVE_AUX_JARS_PATH
+
+# Default the Hadoop heap size to 256MB
+export HADOOP_HEAPSIZE=${HADOOP_HEAPSIZE:-256}
diff --git a/hivesterix/hivesterix-dist/resource/bin/init-hive-dfs.sh b/hivesterix/hivesterix-dist/resource/bin/init-hive-dfs.sh
new file mode 100755
index 0000000..ec3997a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/bin/init-hive-dfs.sh
@@ -0,0 +1,107 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The purpose of this script is to set up the warehouse directories on HDFS
+
+DEFAULT_WAREHOUSE_DIR="/user/hive/warehouse"
+DEFAULT_TMP_DIR="/tmp"
+
+WAREHOUSE_DIR=${DEFAULT_WAREHOUSE_DIR}
+TMP_DIR=${DEFAULT_TMP_DIR}
+HELP=""
+while [ $# -gt 0 ]; do
+  case "$1" in
+    --warehouse-dir)
+      shift
+      WAREHOUSE_DIR=$1
+      shift
+      ;;
+    --tmp-dir)
+      shift
+      TMP_DIR=$1
+      shift
+      ;;
+    --help)
+      HELP=_help
+      shift
+      ;;
+    *)
+      echo "Invalid parameter: $1"
+      HELP=_help
+      break
+      ;;
+  esac
+done
+
+if [ "$HELP" = "_help" ] ; then
+  echo "Usage $0 [--warehouse-dir <Hive user>] [--tmp-dir <Tmp dir>]"
+  echo "Default value of warehouse directory is: [$DEFAULT_WAREHOUSE_DIR]"
+  echo "Default value of the temporary directory is: [$DEFAULT_TMP_DIR]"
+  exit -1
+fi
+
+
+# check for hadoop in the path
+HADOOP_IN_PATH=`which hadoop 2>/dev/null`
+if [ -f "${HADOOP_IN_PATH}" ]; then
+  HADOOP_DIR=`dirname "$HADOOP_IN_PATH"`/..
+fi
+# HADOOP_HOME env variable overrides hadoop in the path
+HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DIR}
+if [ "$HADOOP_HOME" == "" ]; then
+  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  exit 4;
+fi
+
+HADOOP_EXEC=$HADOOP_HOME/bin/hadoop
+if [ ! -f "${HADOOP_EXEC}" ]; then
+  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  exit 4;
+fi
+
+
+# Ensure the tmp directory exists
+$HADOOP_EXEC fs -test -d ${TMP_DIR} > /dev/null 2>&1
+if [ $? -ne 0 ] 
+then
+  echo "Creating directory [${TMP_DIR}]"
+  $HADOOP_EXEC fs -mkdir ${TMP_DIR}
+fi
+
+echo "Setting writeable group rights for directory [${TMP_DIR}]"
+$HADOOP_EXEC fs -chmod g+w ${TMP_DIR}
+
+
+# Ensure the warehouse directory exists
+$HADOOP_EXEC fs -test -d ${WAREHOUSE_DIR} > /dev/null 2>&1
+if [ $? -ne 0 ] 
+then
+  echo "Creating directory [${WAREHOUSE_DIR}]"
+  $HADOOP_EXEC fs -mkdir ${WAREHOUSE_DIR}
+fi
+
+echo "Setting writeable group rights for directory [${WAREHOUSE_DIR}]"
+$HADOOP_EXEC fs -chmod g+w ${WAREHOUSE_DIR}
+
+echo "Initialization done."
+echo
+echo "Please, do not forget to set the following configuration properties in hive-site.xml:"
+echo "hive.metastore.warehouse.dir=${WAREHOUSE_DIR}"
+echo "hive.exec.scratchdir=${TMP_DIR}"
+
+exit 0
diff --git a/hivesterix/hivesterix-dist/resource/deploy/balance.jar b/hivesterix/hivesterix-dist/resource/deploy/balance.jar
new file mode 100644
index 0000000..d1bfbee
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/deploy/balance.jar
Binary files differ
diff --git a/hivesterix/hivesterix-dist/resource/deploy/deploy.sh b/hivesterix/hivesterix-dist/resource/deploy/deploy.sh
new file mode 100755
index 0000000..27cfd39
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/deploy/deploy.sh
@@ -0,0 +1,16 @@
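+# Deploy the Hive 0.7.0 jars (and their third-party dependencies) from
+# $HIVE_HOME/lib into the third-party Nexus repository, so Maven builds can
+# resolve them as ordinary artifacts.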
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-anttasks-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-anttasks -Dversion=0.7.0 -Dpackaging=jar
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-cli-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-cli -Dversion=0.7.0 -Dpackaging=jar
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-common-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-common -Dversion=0.7.0 -Dpackaging=jar
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-exec-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-exec -Dversion=0.7.0 -Dpackaging=jar
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-hwi-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-hwi -Dversion=0.7.0 -Dpackaging=jar
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-jdbc-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-jdbc -Dversion=0.7.0 -Dpackaging=jar
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-metastore-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-metastore -Dversion=0.7.0 -Dpackaging=jar
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-serde-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-serde -Dversion=0.7.0 -Dpackaging=jar
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-service-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-service -Dversion=0.7.0 -Dpackaging=jar
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/hive-shims-0.7.0.jar -DgroupId=org.apache.hadoop.hive -DartifactId=hive-shims -Dversion=0.7.0 -Dpackaging=jar
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/libthrift.jar -DgroupId=org.apache.thrift -DartifactId=libthrift -Dversion=0.5.0 -Dpackaging=jar
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/libfb303.jar -DgroupId=com.facebook -DartifactId=libfb303 -Dversion=0.5.0 -Dpackaging=jar
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/commons-cli-1.2.jar -DgroupId=org.apache.commons -DartifactId=cli -Dversion=1.2 -Dpackaging=jar
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/jdo2-api-2.3-ec.jar -DgroupId=javax -DartifactId=jdo2-api -Dversion=2.3-ec -Dpackaging=jar
+mvn -e deploy:deploy-file -Durl=http://obelix.ics.uci.edu/nexus/content/repositories/third-party/ -DrepositoryId=third-party -Dfile=$HIVE_HOME/lib/log4j-1.2.15.jar -DgroupId=org.apache -DartifactId=log4j -Dversion=1.2.15 -Dpackaging=jar
+
diff --git a/hivesterix/hivesterix-dist/resource/deploy/jar.sh b/hivesterix/hivesterix-dist/resource/deploy/jar.sh
new file mode 100755
index 0000000..a19b1b8
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/deploy/jar.sh
@@ -0,0 +1 @@
+# Package the contents of the current directory into a jar using manifest.txt.
+# The output jar name is an assumption; adjust it to the intended artifact name.
+jar cmf manifest.txt hivesterix-dist.jar .
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/execute.sh b/hivesterix/hivesterix-dist/resource/hivesterix/execute.sh
new file mode 100755
index 0000000..e9f7ace
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/execute.sh
@@ -0,0 +1,13 @@
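+# Run every .hive query in the directory given as $1, timing each query.
+# Results are appended to ${LOG}$2; $2 is an optional run suffix (e.g. the
+# iteration number passed in by loop.sh).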
+LOG=perflog/result.log
+echo "" > ${LOG}$2
+
+for file in $1/*.hive
+do
+   sleep 10
+   START=$(date +%s)
+   echo $file
+   ../bin/hive -f $file > perflog/$file
+   END=$(date +%s)
+   DIFF=$(( $END - $START ))
+   echo "$file $DIFF" >> ${LOG}$2
+done
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/loop.sh b/hivesterix/hivesterix-dist/resource/hivesterix/loop.sh
new file mode 100755
index 0000000..f67896c
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/loop.sh
@@ -0,0 +1,4 @@
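+# Run the tpch_sample query set ten times via execute.sh, tagging each
+# run's result log with the iteration number.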
+for((i=1; i<=10; i++))
+do
+	./execute.sh tpch_sample $i
+done
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/perf.sh b/hivesterix/hivesterix-dist/resource/hivesterix/perf.sh
new file mode 100755
index 0000000..c1d5c0a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/perf.sh
@@ -0,0 +1,28 @@
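+# Time each query in the query set given as $1, restarting the cluster
+# (../asterix/*.sh) and HDFS before every query so each run starts cold.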
+#asterix/stopall.sh
+
+#$HADOOP_HOME/bin/stop-all.sh
+#$HADOOP_HOME/bin/start-all.sh
+#sleep 10
+
+#asterix/startall.sh
+
+LOG=perflog/result.log
+echo "">$LOG
+
+for file in $1/*.hive
+do
+	../asterix/stopall.sh
+	$HADOOP_HOME/bin/stop-all.sh
+	sleep 10
+	../asterix/startall.sh
+	$HADOOP_HOME/bin/start-dfs.sh
+	sleep 10
+	$HADOOP_HOME/bin/hadoop dfsadmin -safemode leave
+
+	START=$(date +%s)
+	echo $file
+	../bin/hive -f $file > perflog/$file
+	END=$(date +%s)
+	DIFF=$(( $END - $START ))
+	echo "$file $DIFF" >> $LOG
+done
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/perflog/tpch100/result.log b/hivesterix/hivesterix-dist/resource/hivesterix/perflog/tpch100/result.log
new file mode 100644
index 0000000..fbf828d
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/perflog/tpch100/result.log
@@ -0,0 +1 @@
+log
\ No newline at end of file
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/perflog/tpch_sample/result.log b/hivesterix/hivesterix-dist/resource/hivesterix/perflog/tpch_sample/result.log
new file mode 100644
index 0000000..fbf828d
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/perflog/tpch_sample/result.log
@@ -0,0 +1 @@
+log
\ No newline at end of file
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/startcluster.sh b/hivesterix/hivesterix-dist/resource/hivesterix/startcluster.sh
new file mode 100755
index 0000000..e7ad708
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/startcluster.sh
@@ -0,0 +1,9 @@
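+# Restart the cluster (../asterix/*.sh) and HDFS, leave safemode, and clear
+# any local metastore state left over from previous runs.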
+../asterix/stopall.sh
+$HADOOP_HOME/bin/stop-all.sh
+sleep 10
+../asterix/startall.sh
+$HADOOP_HOME/bin/start-dfs.sh
+sleep 10
+$HADOOP_HOME/bin/hadoop dfsadmin -safemode leave
+rm -rf metastore*
+
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/stress.sh b/hivesterix/hivesterix-dist/resource/hivesterix/stress.sh
new file mode 100755
index 0000000..8dec4c3
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/stress.sh
@@ -0,0 +1,5 @@
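+# Stress run: three iterations of a full cluster restart followed by the
+# complete tpch100 query set.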
+for((i=1; i<=3; i++))
+do
+	./startcluster.sh
+	./execute.sh tpch100 $i
+done
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q10_returned_item.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q10_returned_item.hive
new file mode 100644
index 0000000..3fbc0df
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q10_returned_item.hive
@@ -0,0 +1,33 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q10_returned_item;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create the result table
+create table q10_returned_item (c_custkey int, c_name string, revenue double, c_acctbal string, n_name string, c_address string, c_phone string, c_comment string);
+
+-- the query
+insert overwrite table q10_returned_item
+select 
+  c_custkey, c_name, sum(l_extendedprice * (1 - l_discount)) as revenue, 
+  c_acctbal, n_name, c_address, c_phone, c_comment
+from
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey and o.o_orderdate >= '1993-10-01' and o.o_orderdate < '1994-01-01'
+  join nation n 
+  on 
+    c.c_nationkey = n.n_nationkey
+  join lineitem l 
+  on 
+    l.l_orderkey = o.o_orderkey and l.l_returnflag = 'R'
+group by c_custkey, c_name, c_acctbal, c_phone, n_name, c_address, c_comment 
+order by revenue desc 
+limit 20;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q11_important_stock.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q11_important_stock.hive
new file mode 100644
index 0000000..4f2f340
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q11_important_stock.hive
@@ -0,0 +1,46 @@
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q11_important_stock;
+DROP TABLE IF EXISTS q11_part_tmp;
+DROP TABLE IF EXISTS q11_sum_tmp;
+
+-- create tables and load data
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+
+-- create the target table
+create table q11_important_stock(ps_partkey INT, value DOUBLE);
+create table q11_part_tmp(ps_partkey int, part_value double);
+create table q11_sum_tmp(total_value double);
+
+-- the query
+insert overwrite table q11_part_tmp
+select 
+  ps_partkey, sum(ps_supplycost * ps_availqty) as part_value 
+from
+  nation n join supplier s 
+  on 
+    s.s_nationkey = n.n_nationkey and n.n_name = 'GERMANY'
+  join partsupp ps 
+  on 
+    ps.ps_suppkey = s.s_suppkey
+group by ps_partkey;
+
+insert overwrite table q11_sum_tmp
+select 
+  sum(part_value) as total_value
+from 
+  q11_part_tmp;
+
+insert overwrite table q11_important_stock
+select 
+  ps_partkey, part_value as value
+from
+  (
+    select ps_partkey, part_value, total_value
+    from q11_part_tmp join q11_sum_tmp
+  ) a
+where part_value > total_value * 0.0001
+order by value desc;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q12_shipping.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q12_shipping.hive
new file mode 100644
index 0000000..92f2bcb
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q12_shipping.hive
@@ -0,0 +1,39 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q12_shipping;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+
+-- create the result table
+create table q12_shipping(l_shipmode string, high_line_count double, low_line_count double);
+
+-- the query
+insert overwrite table q12_shipping
+select 
+  l_shipmode,
+  sum(case
+    when o_orderpriority = '1-URGENT'
+         or o_orderpriority = '2-HIGH'
+    then 1
+    else 0
+  end) as high_line_count,
+  sum(case
+    when o_orderpriority <> '1-URGENT'
+         and o_orderpriority <> '2-HIGH'
+    then 1
+    else 0
+  end) as low_line_count
+from
+  orders o join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey and l.l_commitdate < l.l_receiptdate
+    and l.l_shipdate < l.l_commitdate and l.l_receiptdate >= '1994-01-01' 
+    and l.l_receiptdate < '1995-01-01'
+where 
+  l.l_shipmode = 'MAIL' or l.l_shipmode = 'SHIP'
+group by l_shipmode
+order by l_shipmode;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q13_customer_distribution.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q13_customer_distribution.hive
new file mode 100644
index 0000000..983a42a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q13_customer_distribution.hive
@@ -0,0 +1,26 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q13_customer_distribution;
+
+-- create the tables and load the data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+
+-- create the result table
+create table q13_customer_distribution (c_count int, custdist int);
+
+-- the query
+insert overwrite table q13_customer_distribution
+select 
+  c_count, count(1) as custdist
+from 
+  (select 
+     c_custkey, count(o_orderkey) as c_count
+   from 
+     customer c left outer join orders o 
+     on 
+       c.c_custkey = o.o_custkey and not o.o_comment like '%special%requests%'
+   group by c_custkey
+   ) c_orders
+group by c_count
+order by custdist desc, c_count desc;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q14_promotion_effect.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q14_promotion_effect.hive
new file mode 100644
index 0000000..5966255
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q14_promotion_effect.hive
@@ -0,0 +1,24 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q14_promotion_effect;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the result table
+create table q14_promotion_effect(promo_revenue double);
+
+-- the query
+insert overwrite table q14_promotion_effect
+select 
+  100.00 * sum(case
+               when p_type like 'PROMO%'
+               then l_extendedprice*(1-l_discount)
+               else 0.0
+               end
+  ) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue
+from 
+  part p join lineitem l 
+  on 
+    l.l_partkey = p.p_partkey and l.l_shipdate >= '1995-09-01' and l.l_shipdate < '1995-10-01';
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q15_top_supplier.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q15_top_supplier.hive
new file mode 100644
index 0000000..57d72e5
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q15_top_supplier.hive
@@ -0,0 +1,41 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS revenue;
+DROP TABLE IF EXISTS max_revenue;
+DROP TABLE IF EXISTS q15_top_supplier;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+
+-- create result tables
+create table revenue(supplier_no int, total_revenue double); 
+create table max_revenue(max_revenue double); 
+create table q15_top_supplier(s_suppkey int, s_name string, s_address string, s_phone string, total_revenue double);
+
+-- the query
+insert overwrite table revenue
+select 
+  l_suppkey as supplier_no, sum(l_extendedprice * (1 - l_discount)) as total_revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1996-01-01' and l_shipdate < '1996-04-01'
+group by l_suppkey;
+
+insert overwrite table max_revenue
+select 
+  max(total_revenue)
+from 
+  revenue;
+
+insert overwrite table q15_top_supplier
+select 
+  s_suppkey, s_name, s_address, s_phone, total_revenue
+from supplier s join revenue r 
+  on 
+    s.s_suppkey = r.supplier_no
+  join max_revenue m 
+  on 
+    r.total_revenue = m.max_revenue
+order by s_suppkey;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q16_parts_supplier_relationship.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q16_parts_supplier_relationship.hive
new file mode 100644
index 0000000..ef5d4a1
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q16_parts_supplier_relationship.hive
@@ -0,0 +1,52 @@
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS q16_parts_supplier_relationship;
+DROP TABLE IF EXISTS q16_tmp;
+DROP TABLE IF EXISTS supplier_tmp;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+
+-- create the result table
+create table q16_parts_supplier_relationship(p_brand string, p_type string, p_size int, supplier_cnt int);
+create table q16_tmp(p_brand string, p_type string, p_size int, ps_suppkey int);
+create table supplier_tmp(s_suppkey int);
+
+-- the query
+insert overwrite table supplier_tmp
+select 
+  s_suppkey
+from 
+  supplier
+where 
+  not s_comment like '%Customer%Complaints%';
+
+insert overwrite table q16_tmp
+select 
+  p_brand, p_type, p_size, ps_suppkey
+from 
+  part p join partsupp ps 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_brand <> 'Brand#45' 
+    and not p.p_type like 'MEDIUM POLISHED%'
+  join supplier_tmp s 
+  on 
+    ps.ps_suppkey = s.s_suppkey;
+
+insert overwrite table q16_parts_supplier_relationship
+select 
+  p_brand, p_type, p_size, count(distinct ps_suppkey) as supplier_cnt
+from 
+  (select 
+     * 
+   from
+     q16_tmp 
+   where p_size = 49 or p_size = 14 or p_size = 23 or
+         p_size = 45 or p_size = 19 or p_size = 3 or
+         p_size = 36 or p_size = 9
+) q16_all
+group by p_brand, p_type, p_size
+order by supplier_cnt desc, p_brand, p_type, p_size;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q17_small_quantity_order_revenue.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q17_small_quantity_order_revenue.hive
new file mode 100644
index 0000000..201b89a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q17_small_quantity_order_revenue.hive
@@ -0,0 +1,38 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q17_small_quantity_order_revenue;
+DROP TABLE IF EXISTS lineitem_tmp;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the result table
+create table q17_small_quantity_order_revenue (avg_yearly double);
+create table lineitem_tmp (t_partkey int, t_avg_quantity double);
+
+-- the query
+insert overwrite table lineitem_tmp
+select 
+  l_partkey as t_partkey, 0.2 * avg(l_quantity) as t_avg_quantity
+from 
+  lineitem
+group by l_partkey;
+
+insert overwrite table q17_small_quantity_order_revenue
+select
+  sum(l_extendedprice) / 7.0 as avg_yearly
+from
+  (select l_quantity, l_extendedprice, t_avg_quantity from
+   lineitem_tmp t join
+     (select
+        l_quantity, l_partkey, l_extendedprice
+      from
+        part p join lineitem l
+        on
+          p.p_partkey = l.l_partkey
+          and p.p_brand = 'Brand#23'
+          and p.p_container = 'MED BOX'
+      ) l1 on l1.l_partkey = t.t_partkey
+   ) a
+where l_quantity < t_avg_quantity;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q18_large_volume_customer.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q18_large_volume_customer.hive
new file mode 100644
index 0000000..0daf7cf
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q18_large_volume_customer.hive
@@ -0,0 +1,39 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS q18_tmp;
+DROP TABLE IF EXISTS q18_large_volume_customer;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+
+-- create the result tables
+create table q18_tmp(l_orderkey int, t_sum_quantity double);
+create table q18_large_volume_customer(c_name string, c_custkey int, o_orderkey int, o_orderdate string, o_totalprice double, sum_quantity double);
+
+-- the query
+insert overwrite table q18_tmp
+select 
+  l_orderkey, sum(l_quantity) as t_sum_quantity
+from 
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q18_large_volume_customer
+select 
+  c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice,sum(l_quantity)
+from 
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey
+  join q18_tmp t 
+  on 
+    o.o_orderkey = t.l_orderkey and t.t_sum_quantity > 300
+  join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey
+group by c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice
+order by o_totalprice desc,o_orderdate
+limit 100;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q19_discounted_revenue.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q19_discounted_revenue.hive
new file mode 100644
index 0000000..1c89a36
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q19_discounted_revenue.hive
@@ -0,0 +1,46 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q19_discounted_revenue;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the result table
+create table q19_discounted_revenue(revenue double);
+
+-- the query
+insert overwrite table q19_discounted_revenue
+select
+  sum(l_extendedprice * (1 - l_discount) ) as revenue
+from
+  part p join lineitem l
+  on 
+    p.p_partkey = l.l_partkey    
+where
+  (
+    p_brand = 'Brand#12'
+    and p_container REGEXP 'SM CASE|SM BOX|SM PACK|SM PKG'
+    and l_quantity >= 1 and l_quantity <= 11
+    and p_size >= 1 and p_size <= 5
+    and l_shipmode REGEXP 'AIR|AIR REG'
+    and l_shipinstruct = 'DELIVER IN PERSON'
+  )
+  or
+  (
+    p_brand = 'Brand#23'
+    and p_container REGEXP 'MED BAG|MED BOX|MED PKG|MED PACK'
+    and l_quantity >= 10 and l_quantity <= 20
+    and p_size >= 1 and p_size <= 10
+    and l_shipmode REGEXP 'AIR|AIR REG'
+    and l_shipinstruct = 'DELIVER IN PERSON'
+  )
+  or
+  (
+    p_brand = 'Brand#34'
+    and p_container REGEXP 'LG CASE|LG BOX|LG PACK|LG PKG'
+    and l_quantity >= 20 and l_quantity <= 30
+    and p_size >= 1 and p_size <= 15
+    and l_shipmode REGEXP 'AIR|AIR REG'
+    and l_shipinstruct = 'DELIVER IN PERSON'
+  );
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q1_pricing_summary_report.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q1_pricing_summary_report.hive
new file mode 100644
index 0000000..f59a074
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q1_pricing_summary_report.hive
@@ -0,0 +1,19 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q1_pricing_summary_report;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+
+-- create the target table
+CREATE TABLE q1_pricing_summary_report ( L_RETURNFLAG STRING, L_LINESTATUS STRING, SUM_QTY DOUBLE, SUM_BASE_PRICE DOUBLE, SUM_DISC_PRICE DOUBLE, SUM_CHARGE DOUBLE, AVE_QTY DOUBLE, AVE_PRICE DOUBLE, AVE_DISC DOUBLE, COUNT_ORDER INT);
+
+-- the query
+INSERT OVERWRITE TABLE q1_pricing_summary_report 
+SELECT 
+  L_RETURNFLAG, L_LINESTATUS, SUM(L_QUANTITY), SUM(L_EXTENDEDPRICE), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)*(1+L_TAX)), AVG(L_QUANTITY), AVG(L_EXTENDEDPRICE), AVG(L_DISCOUNT), COUNT(1) 
+FROM 
+  lineitem 
+WHERE 
+  L_SHIPDATE<='1998-09-02' 
+GROUP BY L_RETURNFLAG, L_LINESTATUS 
+ORDER BY L_RETURNFLAG, L_LINESTATUS;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q20_potential_part_promotion.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q20_potential_part_promotion.hive
new file mode 100644
index 0000000..2a9fd4a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q20_potential_part_promotion.hive
@@ -0,0 +1,76 @@
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q20_tmp1;
+DROP TABLE IF EXISTS q20_tmp2;
+DROP TABLE IF EXISTS q20_tmp3;
+DROP TABLE IF EXISTS q20_tmp4;
+DROP TABLE IF EXISTS q20_potential_part_promotion;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the target table
+create table q20_tmp1(p_partkey int);
+create table q20_tmp2(l_partkey int, l_suppkey int, sum_quantity double);
+create table q20_tmp3(ps_suppkey int, ps_availqty int, sum_quantity double);
+create table q20_tmp4(ps_suppkey int);
+create table q20_potential_part_promotion(s_name string, s_address string);
+
+-- the query
+insert overwrite table q20_tmp1
+select distinct p_partkey
+from
+  part 
+where 
+  p_name like 'forest%';
+
+insert overwrite table q20_tmp2
+select 
+  l_partkey, l_suppkey, 0.5 * sum(l_quantity)
+from
+  lineitem
+where
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+group by l_partkey, l_suppkey;
+
+insert overwrite table q20_tmp3
+select 
+  ps_suppkey, ps_availqty, sum_quantity
+from  
+  partsupp ps join q20_tmp1 t1 
+  on 
+    ps.ps_partkey = t1.p_partkey
+  join q20_tmp2 t2 
+  on 
+    ps.ps_partkey = t2.l_partkey and ps.ps_suppkey = t2.l_suppkey;
+
+insert overwrite table q20_tmp4
+select 
+  ps_suppkey
+from 
+  q20_tmp3
+where 
+  ps_availqty > sum_quantity
+group by ps_suppkey;
+
+insert overwrite table q20_potential_part_promotion
+select 
+  s_name, s_address
+from 
+  nation n join supplier s
+  on
+    s.s_nationkey = n.n_nationkey
+    and n.n_name = 'CANADA'
+  join q20_tmp4 t4
+  on 
+    s.s_suppkey = t4.ps_suppkey
+order by s_name;
+
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q21_suppliers_who_kept_orders_waiting.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q21_suppliers_who_kept_orders_waiting.hive
new file mode 100644
index 0000000..9381ef3
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q21_suppliers_who_kept_orders_waiting.hive
@@ -0,0 +1,73 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q21_tmp1;
+DROP TABLE IF EXISTS q21_tmp2;
+DROP TABLE IF EXISTS q21_suppliers_who_kept_orders_waiting;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create target tables
+create table q21_tmp1(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_tmp2(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_suppliers_who_kept_orders_waiting(s_name string, numwait int);
+
+-- the query
+insert overwrite table q21_tmp1
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q21_tmp2
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+where
+  l_receiptdate > l_commitdate
+group by l_orderkey;
+
+insert overwrite table q21_suppliers_who_kept_orders_waiting
+select
+  s_name, count(1) as numwait
+from
+  (select s_name from
+(select s_name, t2.l_orderkey, l_suppkey, count_suppkey, max_suppkey 
+ from q21_tmp2 t2 right outer join
+      (select s_name, l_orderkey, l_suppkey from
+         (select s_name, t1.l_orderkey, l_suppkey, count_suppkey, max_suppkey
+          from
+            (select s_name, l_orderkey, l_suppkey
+             from 
+               (select s_name, l_orderkey, l_suppkey
+                from
+                  nation n join supplier s
+                  on
+                    s.s_nationkey = n.n_nationkey
+                    and n.n_name = 'SAUDI ARABIA'
+                  join lineitem l
+                  on
+                    s.s_suppkey = l.l_suppkey
+                where
+                  l.l_receiptdate > l.l_commitdate
+                ) l1 join orders o on o.o_orderkey = l1.l_orderkey and o.o_orderstatus = 'F'
+             ) l2 join q21_tmp1 t1 on l2.l_orderkey = t1.l_orderkey
+          ) a
+          where
+           (count_suppkey > 1) or ((count_suppkey=1) and (l_suppkey <> max_suppkey))
+       ) l3 on l3.l_orderkey = t2.l_orderkey
+    ) b
+    where
+     (count_suppkey is null) or ((count_suppkey=1) and (l_suppkey = max_suppkey))
+  )c
+group by s_name
+order by numwait desc, s_name
+limit 100;
+
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q22_global_sales_opportunity.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q22_global_sales_opportunity.hive
new file mode 100644
index 0000000..0b418fa
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q22_global_sales_opportunity.hive
@@ -0,0 +1,70 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q22_customer_tmp;
+DROP TABLE IF EXISTS q22_customer_tmp1;
+DROP TABLE IF EXISTS q22_orders_tmp;
+DROP TABLE IF EXISTS q22_global_sales_opportunity;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+
+-- create target tables
+create table q22_customer_tmp(c_acctbal double, c_custkey int, cntrycode string);
+create table q22_customer_tmp1(avg_acctbal double);
+create table q22_orders_tmp(o_custkey int);
+create table q22_global_sales_opportunity(cntrycode string, numcust int, totacctbal double);
+
+-- the query
+insert overwrite table q22_customer_tmp
+select 
+  c_acctbal, c_custkey, substr(c_phone, 1, 2) as cntrycode
+from 
+  customer
+where 
+  substr(c_phone, 1, 2) = '13' or
+  substr(c_phone, 1, 2) = '31' or
+  substr(c_phone, 1, 2) = '23' or
+  substr(c_phone, 1, 2) = '29' or
+  substr(c_phone, 1, 2) = '30' or
+  substr(c_phone, 1, 2) = '18' or
+  substr(c_phone, 1, 2) = '17';
+ 
+insert overwrite table q22_customer_tmp1
+select
+  avg(c_acctbal)
+from
+  q22_customer_tmp
+where
+  c_acctbal > 0.00;
+
+insert overwrite table q22_orders_tmp
+select 
+  o_custkey 
+from 
+  orders
+group by 
+  o_custkey;
+
+insert overwrite table q22_global_sales_opportunity
+select
+  cntrycode, count(1) as numcust, sum(c_acctbal) as totacctbal
+from
+(
+  select cntrycode, c_acctbal, avg_acctbal from
+  q22_customer_tmp1 ct1 join
+  (
+    select cntrycode, c_acctbal from
+      q22_orders_tmp ot 
+      right outer join q22_customer_tmp ct 
+      on
+        ct.c_custkey = ot.o_custkey
+    where
+      o_custkey is null
+  ) ct2
+) a
+where
+  c_acctbal > avg_acctbal
+group by cntrycode
+order by cntrycode;
+
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q2_minimum_cost_supplier.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q2_minimum_cost_supplier.hive
new file mode 100644
index 0000000..7d4a41a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q2_minimum_cost_supplier.hive
@@ -0,0 +1,56 @@
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier_tmp1;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier_tmp2;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/region';
+
+-- create result tables
+create table q2_minimum_cost_supplier_tmp1 (s_acctbal double, s_name string, n_name string, p_partkey int, ps_supplycost double, p_mfgr string, s_address string, s_phone string, s_comment string);
+create table q2_minimum_cost_supplier_tmp2 (p_partkey int, ps_min_supplycost double);
+create table q2_minimum_cost_supplier (s_acctbal double, s_name string, n_name string, p_partkey int, p_mfgr string, s_address string, s_phone string, s_comment string);
+
+-- the query
+insert overwrite table q2_minimum_cost_supplier_tmp1 
+select 
+  s.s_acctbal, s.s_name, n.n_name, p.p_partkey, ps.ps_supplycost, p.p_mfgr, s.s_address, s.s_phone, s.s_comment 
+from 
+  nation n join region r 
+  on 
+    n.n_regionkey = r.r_regionkey and r.r_name = 'EUROPE' 
+  join supplier s 
+  on 
+    s.s_nationkey = n.n_nationkey 
+  join partsupp ps 
+  on 
+    s.s_suppkey = ps.ps_suppkey 
+  join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_size = 15 and p.p_type like '%BRASS';
+
+insert overwrite table q2_minimum_cost_supplier_tmp2 
+select 
+  p_partkey, min(ps_supplycost) 
+from  
+  q2_minimum_cost_supplier_tmp1 
+group by p_partkey;
+
+insert overwrite table q2_minimum_cost_supplier 
+select 
+  t1.s_acctbal, t1.s_name, t1.n_name, t1.p_partkey, t1.p_mfgr, t1.s_address, t1.s_phone, t1.s_comment 
+from 
+  q2_minimum_cost_supplier_tmp1 t1 join q2_minimum_cost_supplier_tmp2 t2 
+on 
+  t1.p_partkey = t2.p_partkey and t1.ps_supplycost=t2.ps_min_supplycost 
+order by s_acctbal desc, n_name, s_name, p_partkey 
+limit 100;
+
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q3_shipping_priority.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q3_shipping_priority.hive
new file mode 100644
index 0000000..2758210
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q3_shipping_priority.hive
@@ -0,0 +1,27 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS q3_shipping_priority;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+
+-- create the target table
+create table q3_shipping_priority (l_orderkey int, revenue double, o_orderdate string, o_shippriority int);
+
+-- the query
+insert overwrite table q3_shipping_priority
+select
+  l_orderkey, sum(l_extendedprice*(1-l_discount)) as revenue, o_orderdate, o_shippriority
+from
+  customer c join orders o
+    on c.c_mktsegment = 'BUILDING' and c.c_custkey = o.o_custkey
+  join lineitem l
+    on l.l_orderkey = o.o_orderkey
+where
+  o_orderdate < '1995-03-15' and l_shipdate > '1995-03-15'
+group by l_orderkey, o_orderdate, o_shippriority
+order by revenue desc, o_orderdate
+limit 10;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q4_order_priority.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q4_order_priority.hive
new file mode 100644
index 0000000..c8da39b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q4_order_priority.hive
@@ -0,0 +1,30 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q4_order_priority_tmp;
+DROP TABLE IF EXISTS q4_order_priority;
+
+-- create tables and load data
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+
+-- create the target table
+CREATE TABLE q4_order_priority_tmp (O_ORDERKEY INT);
+CREATE TABLE q4_order_priority (O_ORDERPRIORITY STRING, ORDER_COUNT INT);
+
+-- the query
+INSERT OVERWRITE TABLE q4_order_priority_tmp 
+select 
+  DISTINCT l_orderkey 
+from 
+  lineitem 
+where 
+  l_commitdate < l_receiptdate;
+INSERT OVERWRITE TABLE q4_order_priority 
+select o_orderpriority, count(1) as order_count 
+from 
+  orders o join q4_order_priority_tmp t 
+  on 
+    o.o_orderkey = t.o_orderkey and o.o_orderdate >= '1993-07-01' and o.o_orderdate < '1993-10-01' 
+group by o_orderpriority 
+order by o_orderpriority;
+
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q5_local_supplier_volume.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q5_local_supplier_volume.hive
new file mode 100644
index 0000000..87a18df
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q5_local_supplier_volume.hive
@@ -0,0 +1,39 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS q5_local_supplier_volume;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/region';
+
+-- create the target table
+create table q5_local_supplier_volume (N_NAME STRING, REVENUE DOUBLE);
+
+-- the query
+insert overwrite table q5_local_supplier_volume 
+select 
+  n_name, sum(l_extendedprice * (1 - l_discount)) as revenue 
+from
+  customer c join
+    ( select n_name, l_extendedprice, l_discount, s_nationkey, o_custkey from 
+      ( select n_name, l_extendedprice, l_discount, l_orderkey, s_nationkey from
+        ( select n_name, s_suppkey, s_nationkey from
+          ( select n_name, n_nationkey 
+            from nation n join region r 
+            on n.n_regionkey = r.r_regionkey and r.r_name = 'ASIA'
+          ) n1 join supplier s on s.s_nationkey = n1.n_nationkey
+        ) s1  join lineitem l on l.l_suppkey = s1.s_suppkey
+      ) l1 join orders o on l1.l_orderkey = o.o_orderkey and o.o_orderdate >= '1994-01-01' 
+              and o.o_orderdate < '1995-01-01'
+) o1 
+on c.c_nationkey = o1.s_nationkey and c.c_custkey = o1.o_custkey
+group by n_name 
+order by revenue desc;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q6_forecast_revenue_change.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q6_forecast_revenue_change.hive
new file mode 100644
index 0000000..e4e5d79
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q6_forecast_revenue_change.hive
@@ -0,0 +1,20 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q6_forecast_revenue_change;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+
+-- create the target table
+create table q6_forecast_revenue_change (revenue double);
+
+-- the query
+insert overwrite table q6_forecast_revenue_change 
+select 
+  sum(l_extendedprice*l_discount) as revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+  and l_discount >= 0.05 and l_discount <= 0.07
+  and l_quantity < 24;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q7_volume_shipping.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q7_volume_shipping.hive
new file mode 100644
index 0000000..c0dd1de
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q7_volume_shipping.hive
@@ -0,0 +1,70 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q7_volume_shipping;
+DROP TABLE IF EXISTS q7_volume_shipping_tmp;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create the target table
+create table q7_volume_shipping (supp_nation string, cust_nation string, l_year int, revenue double);
+create table q7_volume_shipping_tmp(supp_nation string, cust_nation string, s_nationkey int, c_nationkey int);
+
+-- the query
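+-- q7_volume_shipping_tmp materializes both orderings of the (FRANCE, GERMANY)
+-- pair up front; the disjunctive nation condition of TPC-H Q7 cannot be
+-- written as a single Hive equi-join, so a union of the two cases is used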
+insert overwrite table q7_volume_shipping_tmp
+select 
+  * 
+from
+  (
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey, 
+      n2.n_nationkey as c_nationkey
+    from 
+      nation n1 join nation n2 
+      on 
+        n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY'
+    union all
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey, 
+      n2.n_nationkey as c_nationkey
+    from 
+      nation n1 join nation n2 
+      on 
+        n2.n_name = 'FRANCE' and n1.n_name = 'GERMANY'
+  ) a;
+
+insert overwrite table q7_volume_shipping 
+select 
+  supp_nation, cust_nation, l_year, sum(volume) as revenue
+from 
+  (
+    select
+      supp_nation, cust_nation, year(l_shipdate) as l_year, 
+      l_extendedprice * (1 - l_discount) as volume
+    from
+      q7_volume_shipping_tmp t join
+        (select l_shipdate, l_extendedprice, l_discount, c_nationkey, s_nationkey 
+         from supplier s join
+           (select l_shipdate, l_extendedprice, l_discount, l_suppkey, c_nationkey 
+            from customer c join
+              (select l_shipdate, l_extendedprice, l_discount, l_suppkey, o_custkey 
+               from orders o join lineitem l 
+               on 
+                 o.o_orderkey = l.l_orderkey and l.l_shipdate >= '1995-01-01' 
+                 and l.l_shipdate <= '1996-12-31'
+               ) l1 on c.c_custkey = l1.o_custkey
+            ) l2 on s.s_suppkey = l2.l_suppkey
+         ) l3 on l3.c_nationkey = t.c_nationkey and l3.s_nationkey = t.s_nationkey
+   ) shipping
+group by supp_nation, cust_nation, l_year
+order by supp_nation, cust_nation, l_year;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q8_national_market_share.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q8_national_market_share.hive
new file mode 100644
index 0000000..d98871f
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q8_national_market_share.hive
@@ -0,0 +1,57 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q8_national_market_share;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/region';
+
+-- create the result table
+create table q8_national_market_share(o_year string, mkt_share double);
+
+-- the query
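+-- mkt_share: BRAZIL's fraction of the yearly revenue volume, computed with a
+-- conditional sum over the nested join pipeline below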
+insert overwrite table q8_national_market_share 
+select 
+  o_year, sum(case when nation = 'BRAZIL' then volume else 0.0 end) / sum(volume) as mkt_share
+from 
+  (
+    select 
+      year(o_orderdate) as o_year, l_extendedprice * (1 - l_discount) as volume, 
+      n2.n_name as nation
+    from
+      nation n2 join
+        (select o_orderdate, l_discount, l_extendedprice, s_nationkey 
+         from supplier s join
+          (select o_orderdate, l_discount, l_extendedprice, l_suppkey 
+           from part p join
+             (select o_orderdate, l_partkey, l_discount, l_extendedprice, l_suppkey 
+              from 
+                (select o_orderdate, o_orderkey 
+                 from 
+                   (select c.c_custkey 
+                    from 
+                      (select n1.n_nationkey 
+                       from nation n1 join region r
+                       on n1.n_regionkey = r.r_regionkey and r.r_name = 'AMERICA'
+                       ) n11 join customer c on c.c_nationkey = n11.n_nationkey
+                    ) c1 join orders o on c1.c_custkey = o.o_custkey
+                 ) o1 join lineitem l on l.l_orderkey = o1.o_orderkey and o1.o_orderdate >= '1995-01-01' 
+                         and o1.o_orderdate <= '1996-12-31'
+              ) l1 on p.p_partkey = l1.l_partkey and p.p_type = 'ECONOMY ANODIZED STEEL'
+           ) p1 on s.s_suppkey = p1.l_suppkey
+        ) s1 on s1.s_nationkey = n2.n_nationkey
+  ) all_nation
+group by o_year
+order by o_year;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q9_product_type_profit.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q9_product_type_profit.hive
new file mode 100644
index 0000000..1d1a19d
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch100/q9_product_type_profit.hive
@@ -0,0 +1,48 @@
+DROP TABLE part;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE orders;
+DROP TABLE partsupp;
+DROP TABLE nation;
+DROP TABLE q9_product_type_profit;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create the result table
+create table q9_product_type_profit (nation string, o_year string, sum_profit double);
+
+-- the query
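+-- profit per nation and year: revenue minus supply cost, assembled by joining
+-- outward from (nation x supplier) through lineitem, partsupp, part, and orders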
+insert overwrite table q9_product_type_profit
+select 
+  nation, o_year, sum(amount) as sum_profit
+from 
+  (
+    select 
+      n_name as nation, year(o_orderdate) as o_year, 
+      l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount
+    from
+      (select l_extendedprice, l_discount, l_quantity, l_orderkey, n_name, ps_supplycost 
+       from part p join
+         (select l_extendedprice, l_discount, l_quantity, l_partkey, l_orderkey, 
+                 n_name, ps_supplycost 
+          from partsupp ps join
+            (select l_suppkey, l_extendedprice, l_discount, l_quantity, l_partkey, 
+                    l_orderkey, n_name 
+             from
+               (select s_suppkey, n_name 
+                from nation n join supplier s on n.n_nationkey = s.s_nationkey
+               ) s1 join lineitem l on s1.s_suppkey = l.l_suppkey
+            ) l1 on ps.ps_suppkey = l1.l_suppkey and ps.ps_partkey = l1.l_partkey
+         ) l2 on p.p_name like '%green%' and p.p_partkey = l2.l_partkey
+     ) l3 join orders o on o.o_orderkey = l3.l_orderkey
+  ) profit
+group by nation, o_year
+order by nation, o_year desc;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch_sample/q1_pricing_summary_report.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch_sample/q1_pricing_summary_report.hive
new file mode 100644
index 0000000..1785b48
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch_sample/q1_pricing_summary_report.hive
@@ -0,0 +1,19 @@
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+
+-- create the target table
+CREATE TABLE q1_pricing_summary_report ( L_RETURNFLAG STRING, L_LINESTATUS STRING, SUM_QTY DOUBLE, SUM_BASE_PRICE DOUBLE, SUM_DISC_PRICE DOUBLE, SUM_CHARGE DOUBLE, AVE_QTY DOUBLE, AVE_PRICE DOUBLE, AVE_DISC DOUBLE, COUNT_ORDER INT);
+
+-- the query
+INSERT OVERWRITE TABLE q1_pricing_summary_report 
+SELECT 
+  L_RETURNFLAG, L_LINESTATUS, SUM(L_QUANTITY), SUM(L_EXTENDEDPRICE), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)*(1+L_TAX)), AVG(L_QUANTITY), AVG(L_EXTENDEDPRICE), AVG(L_DISCOUNT), COUNT(1) 
+FROM 
+  lineitem 
+WHERE 
+  L_SHIPDATE<='1998-09-02' 
+GROUP BY L_RETURNFLAG, L_LINESTATUS 
+ORDER BY L_RETURNFLAG, L_LINESTATUS;
+
+DROP TABLE lineitem;
+DROP TABLE q1_pricing_summary_report;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch_sample/q3_shipping_priority.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch_sample/q3_shipping_priority.hive
new file mode 100644
index 0000000..1dc68a2
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch_sample/q3_shipping_priority.hive
@@ -0,0 +1,28 @@
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+
+-- create the target table
+create table q3_shipping_priority (l_orderkey int, revenue double, o_orderdate string, o_shippriority int);
+
+-- the query
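+-- unshipped orders: BUILDING-segment customers, ordered before 1995-03-15 with line items shipped after it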
+insert overwrite table q3_shipping_priority
+select
+  l_orderkey, sum(l_extendedprice*(1-l_discount)) as revenue, o_orderdate, o_shippriority
+from
+  customer c join orders o
+    on c.c_mktsegment = 'BUILDING' and c.c_custkey = o.o_custkey
+  join lineitem l
+    on l.l_orderkey = o.o_orderkey
+where
+  o_orderdate < '1995-03-15' and l_shipdate > '1995-03-15'
+group by l_orderkey, o_orderdate, o_shippriority
+order by revenue desc, o_orderdate
+limit 10;
+
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE customer;
+DROP TABLE q3_shipping_priority;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch_sample/q5_local_supplier_volume.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch_sample/q5_local_supplier_volume.hive
new file mode 100644
index 0000000..be91a25
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch_sample/q5_local_supplier_volume.hive
@@ -0,0 +1,41 @@
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/region';
+
+-- create the target table
+create table q5_local_supplier_volume (N_NAME STRING, REVENUE DOUBLE);
+
+-- the query
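+-- the subqueries join outward from (nation x region) through supplier,
+-- lineitem, and orders before the final join against customer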
+insert overwrite table q5_local_supplier_volume 
+select 
+  n_name, sum(l_extendedprice * (1 - l_discount)) as revenue 
+from
+  customer c join
+    ( select n_name, l_extendedprice, l_discount, s_nationkey, o_custkey from orders o join
+      ( select n_name, l_extendedprice, l_discount, l_orderkey, s_nationkey from lineitem l join
+        ( select n_name, s_suppkey, s_nationkey from supplier s join
+          ( select n_name, n_nationkey 
+            from nation n join region r 
+            on n.n_regionkey = r.r_regionkey and r.r_name = 'ASIA'
+          ) n1 on s.s_nationkey = n1.n_nationkey
+        ) s1 on l.l_suppkey = s1.s_suppkey
+      ) l1 on l1.l_orderkey = o.o_orderkey and o.o_orderdate >= '1994-01-01' 
+              and o.o_orderdate < '1995-01-01'
+) o1 
+on c.c_nationkey = o1.s_nationkey and c.c_custkey = o1.o_custkey
+group by n_name 
+order by revenue desc;
+
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE q5_local_supplier_volume;
diff --git a/hivesterix/hivesterix-dist/resource/hivesterix/tpch_sample/q9_product_type_profit.hive b/hivesterix/hivesterix-dist/resource/hivesterix/tpch_sample/q9_product_type_profit.hive
new file mode 100644
index 0000000..c95a92b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/hivesterix/tpch_sample/q9_product_type_profit.hive
@@ -0,0 +1,47 @@
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create the result table
+create table q9_product_type_profit (nation string, o_year string, sum_profit double);
+
+-- the query
+insert overwrite table q9_product_type_profit
+select 
+  nation, o_year, sum(amount) as sum_profit
+from 
+  (
+    select 
+      n_name as nation, year(o_orderdate) as o_year, 
+      l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount
+    from
+      orders o join
+      (select l_extendedprice, l_discount, l_quantity, l_orderkey, n_name, ps_supplycost 
+       from part p join
+         (select l_extendedprice, l_discount, l_quantity, l_partkey, l_orderkey, 
+                 n_name, ps_supplycost 
+          from partsupp ps join
+            (select l_suppkey, l_extendedprice, l_discount, l_quantity, l_partkey, 
+                    l_orderkey, n_name 
+             from
+               (select s_suppkey, n_name 
+                from nation n join supplier s on n.n_nationkey = s.s_nationkey
+               ) s1 join lineitem l on s1.s_suppkey = l.l_suppkey
+            ) l1 on ps.ps_suppkey = l1.l_suppkey and ps.ps_partkey = l1.l_partkey
+         ) l2 on p.p_name like '%green%' and p.p_partkey = l2.l_partkey
+     ) l3 on o.o_orderkey = l3.l_orderkey
+  ) profit
+group by nation, o_year
+order by nation, o_year desc;
+
+DROP TABLE part;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE orders;
+DROP TABLE partsupp;
+DROP TABLE nation;
+DROP TABLE q9_product_type_profit;
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q10_returned_item.hive b/hivesterix/hivesterix-dist/resource/tpch/q10_returned_item.hive
new file mode 100644
index 0000000..b6535cb
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q10_returned_item.hive
@@ -0,0 +1,38 @@
+DROP TABLE lineitem;
+DROP TABLE orders;
+DROP TABLE customer;
+DROP TABLE nation;
+DROP TABLE q10_returned_item;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+
+-- create the result table
+create table q10_returned_item (c_custkey int, c_name string, revenue double, c_acctbal string, n_name string, c_address string, c_phone string, c_comment string);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
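+-- revenue lost to returned items (l_returnflag = 'R') per customer, for orders from one quarter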
+insert overwrite table q10_returned_item
+select 
+  c_custkey, c_name, sum(l_extendedprice * (1 - l_discount)) as revenue, 
+  c_acctbal, n_name, c_address, c_phone, c_comment
+from
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey and o.o_orderdate >= '1993-10-01' and o.o_orderdate < '1994-01-01'
+  join nation n 
+  on 
+    c.c_nationkey = n.n_nationkey
+  join lineitem l 
+  on 
+    l.l_orderkey = o.o_orderkey and l.l_returnflag = 'R'
+group by c_custkey, c_name, c_acctbal, c_phone, n_name, c_address, c_comment 
+order by revenue desc 
+limit 20;
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q11_important_stock.hive b/hivesterix/hivesterix-dist/resource/tpch/q11_important_stock.hive
new file mode 100644
index 0000000..bfa3743
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q11_important_stock.hive
@@ -0,0 +1,50 @@
+DROP TABLE partsupp;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q11_important_stock;
+DROP TABLE q11_part_tmp;
+DROP TABLE q11_sum_tmp;
+
+-- create tables and load data
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/partsupp';
+
+-- create the target table
+create table q11_important_stock(ps_partkey INT, value DOUBLE);
+create table q11_part_tmp(ps_partkey int, part_value double);
+create table q11_sum_tmp(total_value double);
+
+-- the query
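+-- three steps: stock value per part, the grand total, and the final filter;
+-- joining q11_part_tmp with the single-row q11_sum_tmp acts as a cross join
+-- that attaches the total to every part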
+insert overwrite table q11_part_tmp
+select 
+  ps_partkey, sum(ps_supplycost * ps_availqty) as part_value 
+from
+  nation n join supplier s 
+  on 
+    s.s_nationkey = n.n_nationkey and n.n_name = 'GERMANY'
+  join partsupp ps 
+  on 
+    ps.ps_suppkey = s.s_suppkey
+group by ps_partkey;
+
+insert overwrite table q11_sum_tmp
+select 
+  sum(part_value) as total_value
+from 
+  q11_part_tmp;
+
+insert overwrite table q11_important_stock
+select 
+  ps_partkey, part_value as value
+from
+  (
+    select ps_partkey, part_value, total_value
+    from q11_part_tmp join q11_sum_tmp
+  ) a
+where part_value > total_value * 0.0001
+order by value desc;
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q12_shipping.hive b/hivesterix/hivesterix-dist/resource/tpch/q12_shipping.hive
new file mode 100644
index 0000000..0ae896c
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q12_shipping.hive
@@ -0,0 +1,44 @@
+DROP TABLE lineitem;
+DROP TABLE orders;
+DROP TABLE q12_shipping;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+
+-- create the result table
+create table q12_shipping(l_shipmode string, high_line_count double, low_line_count double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
+-- the query
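+-- the conditional sums split late line items into high- and low-priority
+-- counts per ship mode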
+insert overwrite table q12_shipping
+select 
+  l_shipmode,
+  sum(case
+    when o_orderpriority ='1-URGENT'
+         or o_orderpriority ='2-HIGH'
+    then 1
+    else 0
+end
+  ) as high_line_count,
+  sum(case
+    when o_orderpriority <> '1-URGENT'
+         and o_orderpriority <> '2-HIGH'
+    then 1
+    else 0
+end
+  ) as low_line_count
+from
+  orders o join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey and l.l_commitdate < l.l_receiptdate
+    and l.l_shipdate < l.l_commitdate and l.l_receiptdate >= '1994-01-01' 
+    and l.l_receiptdate < '1995-01-01'
+where 
+  l.l_shipmode = 'MAIL' or l.l_shipmode = 'SHIP'
+group by l_shipmode
+order by l_shipmode;
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q13_customer_distribution.hive b/hivesterix/hivesterix-dist/resource/tpch/q13_customer_distribution.hive
new file mode 100644
index 0000000..dd3674d
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q13_customer_distribution.hive
@@ -0,0 +1,29 @@
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE q13_customer_distribution;
+
+-- create the tables and load the data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+
+-- create the result table
+create table q13_customer_distribution (c_count int, custdist int);
+
+-- the query
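+-- the left outer join keeps customers with no qualifying orders, so they are
+-- counted with c_count = 0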
+insert overwrite table q13_customer_distribution
+select 
+  c_count, count(1) as custdist
+from 
+  (select 
+     c_custkey, count(o_orderkey) as c_count
+   from 
+     customer c left outer join orders o 
+     on 
+       c.c_custkey = o.o_custkey and not o.o_comment like '%special%requests%'
+   group by c_custkey
+   ) c_orders
+group by c_count
+order by custdist desc, c_count desc;
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q14_promotion_effect.hive b/hivesterix/hivesterix-dist/resource/tpch/q14_promotion_effect.hive
new file mode 100644
index 0000000..a7ea773
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q14_promotion_effect.hive
@@ -0,0 +1,29 @@
+DROP TABLE lineitem;
+DROP TABLE part;
+DROP TABLE q14_promotion_effect;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+
+-- create the result table
+create table q14_promotion_effect(promo_revenue double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
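+-- promo_revenue: the percentage of one month's revenue coming from PROMO parts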
+insert overwrite table q14_promotion_effect
+select 
+  100.00 * sum(case
+               when p_type like 'PROMO%'
+               then l_extendedprice*(1-l_discount)
+               else 0.0
+               end
+  ) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue
+from 
+  part p join lineitem l 
+  on 
+    l.l_partkey = p.p_partkey and l.l_shipdate >= '1995-09-01' and l.l_shipdate < '1995-10-01';
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q15_top_supplier.hive b/hivesterix/hivesterix-dist/resource/tpch/q15_top_supplier.hive
new file mode 100644
index 0000000..b38ba2c
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q15_top_supplier.hive
@@ -0,0 +1,47 @@
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE revenue;
+DROP TABLE max_revenue;
+DROP TABLE q15_top_supplier;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+
+-- create result tables
+create table revenue(supplier_no int, total_revenue double); 
+create table max_revenue(max_revenue double); 
+create table q15_top_supplier(s_suppkey int, s_name string, s_address string, s_phone string, total_revenue double);
+
+
+set mapred.min.split.size=536870912;
+
+-- the query
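+-- revenue per supplier for one quarter, then a single-row maximum, joined
+-- back against revenue to pick the top supplier(s)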
+insert overwrite table revenue
+select 
+  l_suppkey as supplier_no, sum(l_extendedprice * (1 - l_discount)) as total_revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1996-01-01' and l_shipdate < '1996-04-01'
+group by l_suppkey;
+
+insert overwrite table max_revenue
+select 
+  max(total_revenue)
+from 
+  revenue;
+
+insert overwrite table q15_top_supplier
+select 
+  s_suppkey, s_name, s_address, s_phone, total_revenue
+from supplier s join revenue r 
+  on 
+    s.s_suppkey = r.supplier_no
+  join max_revenue m 
+  on 
+    r.total_revenue = m.max_revenue
+order by s_suppkey;
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q16_parts_supplier_relationship.hive b/hivesterix/hivesterix-dist/resource/tpch/q16_parts_supplier_relationship.hive
new file mode 100644
index 0000000..495a5ea
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q16_parts_supplier_relationship.hive
@@ -0,0 +1,55 @@
+DROP TABLE partsupp;
+DROP TABLE part;
+DROP TABLE supplier;
+DROP TABLE q16_parts_supplier_relationship;
+DROP TABLE q16_tmp;
+DROP TABLE supplier_tmp;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/partsupp';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+
+-- create the result table
+create table q16_parts_supplier_relationship(p_brand string, p_type string, p_size int, supplier_cnt int);
+create table q16_tmp(p_brand string, p_type string, p_size int, ps_suppkey int);
+create table supplier_tmp(s_suppkey int);
+
+-- the query
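+-- supplier_tmp pre-filters suppliers with customer complaints, standing in
+-- for the NOT IN subquery of the original TPC-H Q16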
+insert overwrite table supplier_tmp
+select 
+  s_suppkey
+from 
+  supplier
+where 
+  not s_comment like '%Customer%Complaints%';
+
+insert overwrite table q16_tmp
+select 
+  p_brand, p_type, p_size, ps_suppkey
+from 
+  partsupp ps join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_brand <> 'Brand#45' 
+    and not p.p_type like 'MEDIUM POLISHED%'
+  join supplier_tmp s 
+  on 
+    ps.ps_suppkey = s.s_suppkey;
+
+insert overwrite table q16_parts_supplier_relationship
+select 
+  p_brand, p_type, p_size, count(distinct ps_suppkey) as supplier_cnt
+from 
+  (select 
+     * 
+   from
+     q16_tmp 
+   where p_size = 49 or p_size = 14 or p_size = 23 or
+         p_size = 45 or p_size = 19 or p_size = 3 or
+         p_size = 36 or p_size = 9
+) q16_all
+group by p_brand, p_type, p_size
+order by supplier_cnt desc, p_brand, p_type, p_size;
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q17_small_quantity_order_revenue.hive b/hivesterix/hivesterix-dist/resource/tpch/q17_small_quantity_order_revenue.hive
new file mode 100644
index 0000000..448b8f3
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q17_small_quantity_order_revenue.hive
@@ -0,0 +1,40 @@
+DROP TABLE lineitem;
+DROP TABLE part;
+DROP TABLE q17_small_quantity_order_revenue;
+DROP TABLE lineitem_tmp;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+
+-- create the result table
+create table q17_small_quantity_order_revenue (avg_yearly double);
+create table lineitem_tmp (t_partkey int, t_avg_quantity double);
+
+-- the query
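+-- lineitem_tmp precomputes 0.2 * avg(l_quantity) per part, replacing the
+-- correlated subquery of the original TPC-H Q17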
+insert overwrite table lineitem_tmp
+select 
+  l_partkey as t_partkey, 0.2 * avg(l_quantity) as t_avg_quantity
+from 
+  lineitem
+group by l_partkey;
+
+insert overwrite table q17_small_quantity_order_revenue
+select
+  sum(l_extendedprice) / 7.0 as avg_yearly
+from
+  (select l_quantity, l_extendedprice, t_avg_quantity from
+   lineitem_tmp t join
+     (select
+        l_quantity, l_partkey, l_extendedprice
+      from
+        part p join lineitem l
+        on
+          p.p_partkey = l.l_partkey
+          and p.p_brand = 'Brand#23'
+          and p.p_container = 'MED BOX'
+      ) l1 on l1.l_partkey = t.t_partkey
+   ) a
+where l_quantity < t_avg_quantity;
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q18_large_volume_customer.hive b/hivesterix/hivesterix-dist/resource/tpch/q18_large_volume_customer.hive
new file mode 100644
index 0000000..04081ad
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q18_large_volume_customer.hive
@@ -0,0 +1,45 @@
+DROP TABLE lineitem;
+DROP TABLE orders;
+DROP TABLE customer;
+DROP TABLE q18_tmp;
+DROP TABLE q18_large_volume_customer;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+
+-- create the result tables
+create table q18_tmp(l_orderkey int, t_sum_quantity double);
+create table q18_large_volume_customer(c_name string, c_custkey int, o_orderkey int, o_orderdate string, o_totalprice double, sum_quantity double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1164000000;
+
+-- the query
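+-- q18_tmp precomputes the quantity sum per order; the t_sum_quantity > 300
+-- join predicate stands in for the HAVING subquery of the original TPC-H Q18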
+insert overwrite table q18_tmp
+select 
+  l_orderkey, sum(l_quantity) as t_sum_quantity
+from 
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q18_large_volume_customer
+select 
+  c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice,sum(l_quantity)
+from 
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey
+  join q18_tmp t 
+  on 
+    o.o_orderkey = t.l_orderkey and t.t_sum_quantity > 300
+  join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey
+group by c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice
+order by o_totalprice desc,o_orderdate
+limit 100;
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q19_discounted_revenue.hive b/hivesterix/hivesterix-dist/resource/tpch/q19_discounted_revenue.hive
new file mode 100644
index 0000000..1e821ca
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q19_discounted_revenue.hive
@@ -0,0 +1,51 @@
+DROP TABLE lineitem;
+DROP TABLE part;
+DROP TABLE q19_discounted_revenue;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+
+-- create the result table
+create table q19_discounted_revenue(revenue double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
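+-- each REGEXP alternation below stands in for an IN list over the container
+-- and ship-mode values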
+insert overwrite table q19_discounted_revenue
+select
+  sum(l_extendedprice * (1 - l_discount) ) as revenue
+from
+  lineitem l join part p
+  on 
+    p.p_partkey = l.l_partkey    
+where
+  (
+    p_brand = 'Brand#12'
+    and p_container REGEXP 'SM CASE|SM BOX|SM PACK|SM PKG'
+    and l_quantity >= 1 and l_quantity <= 11
+    and p_size >= 1 and p_size <= 5
+    and l_shipmode REGEXP 'AIR|AIR REG'
+    and l_shipinstruct = 'DELIVER IN PERSON'
+  ) 
+  or 
+  (
+    p_brand = 'Brand#23'
+    and p_container REGEXP 'MED BAG|MED BOX|MED PKG|MED PACK'
+    and l_quantity >= 10 and l_quantity <= 20
+    and p_size >= 1 and p_size <= 10
+    and l_shipmode REGEXP 'AIR|AIR REG'
+    and l_shipinstruct = 'DELIVER IN PERSON'
+  )
+  or
+  (
+    p_brand = 'Brand#34'
+    and p_container REGEXP 'LG CASE|LG BOX|LG PACK|LG PKG'
+    and l_quantity >= 20 and l_quantity <= 30
+    and p_size >= 1 and p_size <= 15
+    and l_shipmode REGEXP 'AIR|AIR REG'
+    and l_shipinstruct = 'DELIVER IN PERSON'
+  );
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q1_pricing_summary_report.hive b/hivesterix/hivesterix-dist/resource/tpch/q1_pricing_summary_report.hive
new file mode 100644
index 0000000..94b2913
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q1_pricing_summary_report.hive
@@ -0,0 +1,21 @@
+DROP TABLE lineitem;
+DROP TABLE q1_pricing_summary_report;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+
+-- create the target table
+CREATE TABLE q1_pricing_summary_report ( L_RETURNFLAG STRING, L_LINESTATUS STRING, SUM_QTY DOUBLE, SUM_BASE_PRICE DOUBLE, SUM_DISC_PRICE DOUBLE, SUM_CHARGE DOUBLE, AVE_QTY DOUBLE, AVE_PRICE DOUBLE, AVE_DISC DOUBLE, COUNT_ORDER INT);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+-- INSERT OVERWRITE TABLE q1_pricing_summary_report 
+SELECT 
+  L_RETURNFLAG, L_LINESTATUS, SUM(L_QUANTITY), SUM(L_EXTENDEDPRICE), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)*(1+L_TAX)), AVG(L_QUANTITY), AVG(L_EXTENDEDPRICE), AVG(L_DISCOUNT), COUNT(1) 
+FROM 
+  lineitem 
+WHERE 
+  L_SHIPDATE<='1998-09-02' 
+GROUP BY L_RETURNFLAG, L_LINESTATUS 
+ORDER BY L_RETURNFLAG, L_LINESTATUS;
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q20_potential_part_promotion.hive b/hivesterix/hivesterix-dist/resource/tpch/q20_potential_part_promotion.hive
new file mode 100644
index 0000000..40ae423
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q20_potential_part_promotion.hive
@@ -0,0 +1,82 @@
+DROP TABLE part;
+DROP TABLE partsupp;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q20_tmp1;
+DROP TABLE q20_tmp2;
+DROP TABLE q20_tmp3;
+DROP TABLE q20_tmp4;
+DROP TABLE q20_potential_part_promotion;
+
+-- create tables and load data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/partsupp';
+
+-- create the target table
+create table q20_tmp1(p_partkey int);
+create table q20_tmp2(l_partkey int, l_suppkey int, sum_quantity double);
+create table q20_tmp3(ps_suppkey int, ps_availqty int, sum_quantity double);
+create table q20_tmp4(ps_suppkey int);
+create table q20_potential_part_promotion(s_name string, s_address string);
+
+set mapred.min.split.size=536870912;
+
+-- the query
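+-- a pipeline of tmp tables replaces the nested IN subqueries of TPC-H Q20:
+-- forest parts, half the 1994 shipped quantity per (part, supplier), the
+-- availability check, and finally the qualifying Canadian suppliers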
+insert overwrite table q20_tmp1
+select distinct p_partkey
+from
+  part 
+where 
+  p_name like 'forest%';
+
+insert overwrite table q20_tmp2
+select 
+  l_partkey, l_suppkey, 0.5 * sum(l_quantity)
+from
+  lineitem
+where
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+group by l_partkey, l_suppkey;
+
+insert overwrite table q20_tmp3
+select 
+  ps_suppkey, ps_availqty, sum_quantity
+from  
+  partsupp ps join q20_tmp1 t1 
+  on 
+    ps.ps_partkey = t1.p_partkey
+  join q20_tmp2 t2 
+  on 
+    ps.ps_partkey = t2.l_partkey and ps.ps_suppkey = t2.l_suppkey;
+
+insert overwrite table q20_tmp4
+select 
+  ps_suppkey
+from 
+  q20_tmp3
+where 
+  ps_availqty > sum_quantity
+group by ps_suppkey;
+
+insert overwrite table q20_potential_part_promotion
+select 
+  s_name, s_address
+from 
+  supplier s join nation n
+  on
+    s.s_nationkey = n.n_nationkey
+    and n.n_name = 'CANADA'
+  join q20_tmp4 t4
+  on 
+    s.s_suppkey = t4.ps_suppkey
+order by s_name;
+
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q21_suppliers_who_kept_orders_waiting.hive b/hivesterix/hivesterix-dist/resource/tpch/q21_suppliers_who_kept_orders_waiting.hive
new file mode 100644
index 0000000..0418540
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q21_suppliers_who_kept_orders_waiting.hive
@@ -0,0 +1,77 @@
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q21_tmp1;
+DROP TABLE q21_tmp2;
+DROP TABLE q21_suppliers_who_kept_orders_waiting;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+
+-- create target tables
+create table q21_tmp1(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_tmp2(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_suppliers_who_kept_orders_waiting(s_name string, numwait int);
+
+-- the query
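+-- q21_tmp1 counts suppliers per order and q21_tmp2 counts them among late
+-- line items; the join filters and the right outer join with its null check
+-- emulate the EXISTS / NOT EXISTS pair of the original TPC-H Q21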
+insert overwrite table q21_tmp1
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q21_tmp2
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+where
+  l_receiptdate > l_commitdate
+group by l_orderkey;
+
+insert overwrite table q21_suppliers_who_kept_orders_waiting
+select
+  s_name, count(1) as numwait
+from
+  (select s_name from
+    (select s_name, t2.l_orderkey, l_suppkey, count_suppkey, max_suppkey 
+     from q21_tmp2 t2 right outer join
+      (select s_name, l_orderkey, l_suppkey from
+         (select s_name, t1.l_orderkey, l_suppkey, count_suppkey, max_suppkey
+          from
+            q21_tmp1 t1 join
+            (select s_name, l_orderkey, l_suppkey
+             from 
+               orders o join
+               (select s_name, l_orderkey, l_suppkey
+                from
+                  nation n join supplier s
+                  on
+                    s.s_nationkey = n.n_nationkey
+                    and n.n_name = 'SAUDI ARABIA'
+                  join lineitem l
+                  on
+                    s.s_suppkey = l.l_suppkey
+                where
+                  l.l_receiptdate > l.l_commitdate
+                ) l1 on o.o_orderkey = l1.l_orderkey and o.o_orderstatus = 'F'
+             ) l2 on l2.l_orderkey = t1.l_orderkey
+          ) a
+          where
+           (count_suppkey > 1) or ((count_suppkey=1) and (l_suppkey <> max_suppkey))
+       ) l3 on l3.l_orderkey = t2.l_orderkey
+    ) b
+    where
+     (count_suppkey is null) or ((count_suppkey=1) and (l_suppkey = max_suppkey))
+  ) c
+group by s_name
+order by numwait desc, s_name
+limit 100;
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q22_global_sales_opportunity.hive b/hivesterix/hivesterix-dist/resource/tpch/q22_global_sales_opportunity.hive
new file mode 100644
index 0000000..379cfc9
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q22_global_sales_opportunity.hive
@@ -0,0 +1,73 @@
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE q22_customer_tmp;
+DROP TABLE q22_customer_tmp1;
+DROP TABLE q22_orders_tmp;
+DROP TABLE q22_global_sales_opportunity;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+
+-- create target tables
+create table q22_customer_tmp(c_acctbal double, c_custkey int, cntrycode string);
+create table q22_customer_tmp1(avg_acctbal double);
+create table q22_orders_tmp(o_custkey int);
+create table q22_global_sales_opportunity(cntrycode string, numcust int, totacctbal double);
+
+-- the query
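+-- four steps: candidate customers by country code, their average balance,
+-- customers that have orders, and a right outer join whose null check keeps
+-- only customers without orders (emulating NOT EXISTS)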
+insert overwrite table q22_customer_tmp
+select 
+  c_acctbal, c_custkey, substr(c_phone, 1, 2) as cntrycode
+from 
+  customer
+where 
+  substr(c_phone, 1, 2) = '13' or
+  substr(c_phone, 1, 2) = '31' or
+  substr(c_phone, 1, 2) = '23' or
+  substr(c_phone, 1, 2) = '29' or
+  substr(c_phone, 1, 2) = '30' or
+  substr(c_phone, 1, 2) = '18' or
+  substr(c_phone, 1, 2) = '17';
+ 
+insert overwrite table q22_customer_tmp1
+select
+  avg(c_acctbal)
+from
+  q22_customer_tmp
+where
+  c_acctbal > 0.00;
+
+insert overwrite table q22_orders_tmp
+select 
+  o_custkey 
+from 
+  orders
+group by 
+  o_custkey;
+
+insert overwrite table q22_global_sales_opportunity
+select
+  cntrycode, count(1) as numcust, sum(c_acctbal) as totacctbal
+from
+(
+  select cntrycode, c_acctbal, avg_acctbal from
+  q22_customer_tmp1 ct1 join
+  (
+    select cntrycode, c_acctbal from
+      q22_orders_tmp ot 
+      right outer join q22_customer_tmp ct 
+      on
+        ct.c_custkey = ot.o_custkey
+    where
+      o_custkey is null
+  ) ct2
+) a
+where
+  c_acctbal > avg_acctbal
+group by cntrycode
+order by cntrycode;
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q2_copy.hive b/hivesterix/hivesterix-dist/resource/tpch/q2_copy.hive
new file mode 100644
index 0000000..647d500
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q2_copy.hive
@@ -0,0 +1,46 @@
+DROP TABLE part;
+DROP TABLE supplier;
+DROP TABLE partsupp;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE q2_minimum_cost_supplier;
+DROP TABLE q2_minimum_cost_supplier_tmp1;
+DROP TABLE q2_minimum_cost_supplier_tmp2;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/region';
+
+-- create result tables
+create table q2_minimum_cost_supplier_tmp1 (s_acctbal double, s_name string, n_name string, p_partkey int, ps_supplycost double, p_mfgr string, s_address string, s_phone string, s_comment string);
+create table q2_minimum_cost_supplier_tmp2 (p_partkey int, ps_min_supplycost double);
+create table q2_minimum_cost_supplier (s_acctbal double, s_name string, n_name string, p_partkey int, p_mfgr string, s_address string, s_phone string, s_comment string);
+
+-- the query
+insert overwrite table q2_minimum_cost_supplier_tmp1 
+select 
+  s.s_acctbal, s.s_name, n.n_name, p.p_partkey, ps.ps_supplycost, p.p_mfgr, s.s_address, s.s_phone, s.s_comment 
+from 
+  nation n join region r 
+  on 
+    n.n_regionkey = r.r_regionkey and r.r_name = 'EUROPE' 
+  join supplier s 
+  on 
+    s.s_nationkey = n.n_nationkey 
+  join partsupp ps 
+  on 
+    s.s_suppkey = ps.ps_suppkey 
+  join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_size = 15;
+
+-- explain insert overwrite table q2_minimum_cost_supplier_tmp2
+-- select
+--  p_partkey, min(ps_supplycost)
+-- from
+--  q2_minimum_cost_supplier_tmp1
+-- group by p_partkey
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q2_minimum_cost_supplier.hive b/hivesterix/hivesterix-dist/resource/tpch/q2_minimum_cost_supplier.hive
new file mode 100644
index 0000000..7a68ee2
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q2_minimum_cost_supplier.hive
@@ -0,0 +1,56 @@
+DROP TABLE part;
+DROP TABLE supplier;
+DROP TABLE partsupp;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE q2_minimum_cost_supplier;
+DROP TABLE q2_minimum_cost_supplier_tmp1;
+DROP TABLE q2_minimum_cost_supplier_tmp2;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/region';
+
+-- create result tables
+create table q2_minimum_cost_supplier_tmp1 (s_acctbal double, s_name string, n_name string, p_partkey int, ps_supplycost double, p_mfgr string, s_address string, s_phone string, s_comment string);
+create table q2_minimum_cost_supplier_tmp2 (p_partkey int, ps_min_supplycost double);
+create table q2_minimum_cost_supplier (s_acctbal double, s_name string, n_name string, p_partkey int, p_mfgr string, s_address string, s_phone string, s_comment string);
+
+-- the query
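+-- the min() subquery of TPC-H Q2 is unrolled into three stages over temporary tables
+-- stage 1: candidate EUROPE suppliers for size-15 '%BRASS' parts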
+insert overwrite table q2_minimum_cost_supplier_tmp1 
+select 
+  s.s_acctbal, s.s_name, n.n_name, p.p_partkey, ps.ps_supplycost, p.p_mfgr, s.s_address, s.s_phone, s.s_comment 
+from 
+  nation n join region r 
+  on 
+    n.n_regionkey = r.r_regionkey and r.r_name = 'EUROPE' 
+  join supplier s
+  on
+    s.s_nationkey = n.n_nationkey
+  join partsupp ps
+  on
+    s.s_suppkey = ps.ps_suppkey
+  join part p
+  on
+    p.p_partkey = ps.ps_partkey and p.p_size = 15 and p.p_type like '%BRASS';
+
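+-- stage 2: minimum supply cost per part over the candidates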
+insert overwrite table q2_minimum_cost_supplier_tmp2 
+select 
+  p_partkey, min(ps_supplycost) 
+from  
+  q2_minimum_cost_supplier_tmp1 
+group by p_partkey;
+
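+-- stage 3: keep the suppliers that match the per-part minimum supply cost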
+insert overwrite table q2_minimum_cost_supplier 
+select 
+  t1.s_acctbal, t1.s_name, t1.n_name, t1.p_partkey, t1.p_mfgr, t1.s_address, t1.s_phone, t1.s_comment 
+from 
+  q2_minimum_cost_supplier_tmp1 t1 join q2_minimum_cost_supplier_tmp2 t2 
+on 
+  t1.p_partkey = t2.p_partkey and t1.ps_supplycost=t2.ps_min_supplycost 
+order by s_acctbal desc, n_name, s_name, p_partkey 
+limit 100;
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q3_shipping_priority.hive b/hivesterix/hivesterix-dist/resource/tpch/q3_shipping_priority.hive
new file mode 100644
index 0000000..888775e
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q3_shipping_priority.hive
@@ -0,0 +1,30 @@
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE customer;
+DROP TABLE q3_shipping_priority;
+
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+
+-- create the target table
+create table q3_shipping_priority (l_orderkey int, revenue double, o_orderdate string, o_shippriority int);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
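+-- top 10 BUILDING-segment orders placed before 1995-03-15, ranked by revenue
+-- sum(l_extendedprice * (1 - l_discount)) from lines shipped after that date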
+Insert overwrite table q3_shipping_priority
+select
+  l_orderkey, sum(l_extendedprice*(1-l_discount)) as revenue, o_orderdate, o_shippriority
+from
+  customer c join orders o
+    on c.c_mktsegment = 'BUILDING' and c.c_custkey = o.o_custkey
+  join lineitem l
+    on l.l_orderkey = o.o_orderkey
+where
+  o_orderdate < '1995-03-15' and l_shipdate > '1995-03-15'
+group by l_orderkey, o_orderdate, o_shippriority
+order by revenue desc, o_orderdate
+limit 10;
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q4_order_priority.hive b/hivesterix/hivesterix-dist/resource/tpch/q4_order_priority.hive
new file mode 100644
index 0000000..18c8d9d
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q4_order_priority.hive
@@ -0,0 +1,30 @@
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE q4_order_priority_tmp;
+DROP TABLE q4_order_priority;
+
+-- create tables and load data
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+
+-- create the target table
+CREATE TABLE q4_order_priority_tmp (O_ORDERKEY INT);
+CREATE TABLE q4_order_priority (O_ORDERPRIORITY STRING, ORDER_COUNT INT);
+
+set mapred.min.split.size=536870912;
+-- the query
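+-- stage 1: orders with at least one late line item (commit date before receipt date)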
+INSERT OVERWRITE TABLE q4_order_priority_tmp 
+select 
+  DISTINCT l_orderkey 
+from 
+  lineitem 
+where 
+  l_commitdate < l_receiptdate;
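+
+-- stage 2: count those orders per priority within the 1993-Q3 order-date window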
+INSERT OVERWRITE TABLE q4_order_priority 
+select o_orderpriority, count(1) as order_count 
+from 
+  orders o join q4_order_priority_tmp t 
+  on 
+    o.o_orderkey = t.o_orderkey and o.o_orderdate >= '1993-07-01' and o.o_orderdate < '1993-10-01'
+group by o_orderpriority 
+order by o_orderpriority;
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q5_local_supplier_volume.hive b/hivesterix/hivesterix-dist/resource/tpch/q5_local_supplier_volume.hive
new file mode 100644
index 0000000..f5b10d8
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q5_local_supplier_volume.hive
@@ -0,0 +1,42 @@
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE q5_local_supplier_volume;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/region';
+
+-- create the target table
+create table q5_local_supplier_volume (N_NAME STRING, REVENUE DOUBLE);
+
+set mapred.min.split.size=536870912;
+
+-- the query
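+-- the join chain is built bottom-up: ASIA nations -> their suppliers -> line items ->
+-- 1994 orders -> customers, keeping only orders where customer and supplier share a nation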
+insert overwrite table q5_local_supplier_volume 
+select 
+  n_name, sum(l_extendedprice * (1 - l_discount)) as revenue 
+from
+  customer c join
+    ( select n_name, l_extendedprice, l_discount, s_nationkey, o_custkey from orders o join
+      ( select n_name, l_extendedprice, l_discount, l_orderkey, s_nationkey from lineitem l join
+        ( select n_name, s_suppkey, s_nationkey from supplier s join
+          ( select n_name, n_nationkey 
+            from nation n join region r 
+            on n.n_regionkey = r.r_regionkey and r.r_name = 'ASIA'
+          ) n1 on s.s_nationkey = n1.n_nationkey
+        ) s1 on l.l_suppkey = s1.s_suppkey
+      ) l1 on l1.l_orderkey = o.o_orderkey and o.o_orderdate >= '1994-01-01' 
+              and o.o_orderdate < '1995-01-01'
+  ) o1
+  on c.c_nationkey = o1.s_nationkey and c.c_custkey = o1.o_custkey
+group by n_name 
+order by revenue desc;
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q6_forecast_revenue_change.hive b/hivesterix/hivesterix-dist/resource/tpch/q6_forecast_revenue_change.hive
new file mode 100644
index 0000000..72900c7
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q6_forecast_revenue_change.hive
@@ -0,0 +1,21 @@
+DROP TABLE lineitem;
+DROP TABLE q6_forecast_revenue_change;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+
+-- create the target table
+create table q6_forecast_revenue_change (revenue double);
+
+-- the query
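+-- single table scan: revenue forgone = sum(l_extendedprice * l_discount)
+-- over the filtered 1994 line items; no joins required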
+insert overwrite table q6_forecast_revenue_change 
+select 
+  sum(l_extendedprice*l_discount) as revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+  and l_discount >= 0.05 and l_discount <= 0.07
+  and l_quantity < 24;
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q7_volume_shipping.hive b/hivesterix/hivesterix-dist/resource/tpch/q7_volume_shipping.hive
new file mode 100644
index 0000000..da6eab2
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q7_volume_shipping.hive
@@ -0,0 +1,71 @@
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q7_volume_shipping;
+DROP TABLE q7_volume_shipping_tmp;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+
+-- create the target table
+create table q7_volume_shipping (supp_nation string, cust_nation string, l_year int, revenue double);
+create table q7_volume_shipping_tmp(supp_nation string, cust_nation string, s_nationkey int, c_nationkey int);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
+-- the query
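+-- stage 1: the two admissible (supplier nation, customer nation) pairs,
+-- FRANCE-GERMANY in both directions, built with a UNION ALL over nation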
+insert overwrite table q7_volume_shipping_tmp
+select 
+  * 
+from
+  (
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey,
+      n2.n_nationkey as c_nationkey
+    from
+      nation n1 join nation n2
+      on
+        n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY'
+    UNION ALL
+    select
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey,
+      n2.n_nationkey as c_nationkey
+    from
+      nation n1 join nation n2
+      on
+        n2.n_name = 'FRANCE' and n1.n_name = 'GERMANY'
+  ) a;
+
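+-- stage 2: join the shipping data bottom-up (orders+lineitem, then customer, then
+-- supplier) and aggregate revenue per nation pair and ship year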
+insert overwrite table q7_volume_shipping 
+select 
+  supp_nation, cust_nation, l_year, sum(volume) as revenue
+from 
+  (
+    select
+      supp_nation, cust_nation, year(l_shipdate) as l_year, 
+      l_extendedprice * (1 - l_discount) as volume
+    from
+      q7_volume_shipping_tmp t join
+        (select l_shipdate, l_extendedprice, l_discount, c_nationkey, s_nationkey 
+         from supplier s join
+           (select l_shipdate, l_extendedprice, l_discount, l_suppkey, c_nationkey 
+            from customer c join
+              (select l_shipdate, l_extendedprice, l_discount, l_suppkey, o_custkey 
+               from orders o join lineitem l 
+               on 
+                 o.o_orderkey = l.l_orderkey and l.l_shipdate >= '1995-01-01' 
+                 and l.l_shipdate <= '1996-12-31'
+               ) l1 on c.c_custkey = l1.o_custkey
+            ) l2 on s.s_suppkey = l2.l_suppkey
+         ) l3 on l3.c_nationkey = t.c_nationkey and l3.s_nationkey = t.s_nationkey
+   ) shipping
+group by supp_nation, cust_nation, l_year
+order by supp_nation, cust_nation, l_year;
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q8_national_market_share.hive b/hivesterix/hivesterix-dist/resource/tpch/q8_national_market_share.hive
new file mode 100644
index 0000000..ae2abec
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q8_national_market_share.hive
@@ -0,0 +1,56 @@
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE part;
+DROP TABLE q8_national_market_share;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/region';
+
+-- create the result table
+create table q8_national_market_share(o_year string, mkt_share double);
+
+-- the query
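+-- BRAZIL's share of ECONOMY ANODIZED STEEL revenue among AMERICA customers per order
+-- year; the conditional sum over 'nation' computes the numerator of the market share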
+insert overwrite table q8_national_market_share 
+select 
+  o_year, sum(case when nation = 'BRAZIL' then volume else 0.0 end) / sum(volume) as mkt_share
+from 
+  (
+    select
+      year(o_orderdate) as o_year, l_extendedprice * (1 - l_discount) as volume,
+      n2.n_name as nation
+    from
+      nation n2 join
+        (select o_orderdate, l_discount, l_extendedprice, s_nationkey 
+         from supplier s join
+          (select o_orderdate, l_discount, l_extendedprice, l_suppkey 
+           from part p join
+             (select o_orderdate, l_partkey, l_discount, l_extendedprice, l_suppkey 
+              from lineitem l join
+                (select o_orderdate, o_orderkey 
+                 from orders o join
+                   (select c.c_custkey 
+                    from customer c join
+                      (select n1.n_nationkey 
+                       from nation n1 join region r
+                       on n1.n_regionkey = r.r_regionkey and r.r_name = 'AMERICA'
+                       ) n11 on c.c_nationkey = n11.n_nationkey
+                    ) c1 on c1.c_custkey = o.o_custkey
+                 ) o1 on l.l_orderkey = o1.o_orderkey and o1.o_orderdate >= '1995-01-01' 
+                         and o1.o_orderdate < '1996-12-31'
+              ) l1 on p.p_partkey = l1.l_partkey and p.p_type = 'ECONOMY ANODIZED STEEL'
+           ) p1 on s.s_suppkey = p1.l_suppkey
+        ) s1 on s1.s_nationkey = n2.n_nationkey
+  ) all_nation
+group by o_year
+order by o_year;
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch/q9_product_type_profit.hive b/hivesterix/hivesterix-dist/resource/tpch/q9_product_type_profit.hive
new file mode 100644
index 0000000..bc8ba3f
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch/q9_product_type_profit.hive
@@ -0,0 +1,51 @@
+DROP TABLE part;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE orders;
+DROP TABLE partsupp;
+DROP TABLE nation;
+DROP TABLE q9_product_type_profit;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/part';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/10/nation';
+
+-- create the result table
+create table q9_product_type_profit (nation string, o_year string, sum_profit double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
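+-- profit per supplier nation and order year for '%green%' parts, where
+-- amount = l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity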
+insert overwrite table q9_product_type_profit
+select 
+  nation, o_year, sum(amount) as sum_profit
+from 
+  (
+    select
+      n_name as nation, year(o_orderdate) as o_year,
+      l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount
+    from
+      orders o join
+      (select l_extendedprice, l_discount, l_quantity, l_orderkey, n_name, ps_supplycost 
+       from part p join
+         (select l_extendedprice, l_discount, l_quantity, l_partkey, l_orderkey, 
+                 n_name, ps_supplycost 
+          from partsupp ps join
+            (select l_suppkey, l_extendedprice, l_discount, l_quantity, l_partkey, 
+                    l_orderkey, n_name 
+             from
+               (select s_suppkey, n_name 
+                from nation n join supplier s on n.n_nationkey = s.s_nationkey
+               ) s1 join lineitem l on s1.s_suppkey = l.l_suppkey
+            ) l1 on ps.ps_suppkey = l1.l_suppkey and ps.ps_partkey = l1.l_partkey
+         ) l2 on p.p_name like '%green%' and p.p_partkey = l2.l_partkey
+     ) l3 on o.o_orderkey = l3.l_orderkey
+  ) profit
+group by nation, o_year
+order by nation, o_year desc;
+
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q10_returned_item.hive b/hivesterix/hivesterix-dist/resource/tpch100/q10_returned_item.hive
new file mode 100644
index 0000000..1e4e3c6
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q10_returned_item.hive
@@ -0,0 +1,36 @@
+-- create the tables and load the data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create the result table
+create table q10_returned_item (c_custkey int, c_name string, revenue double, c_acctbal string, n_name string, c_address string, c_phone string, c_comment string);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
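+-- top 20 customers by revenue lost to returned items (l_returnflag = 'R') on 1993-Q4 orders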
+insert overwrite table q10_returned_item
+select 
+  c_custkey, c_name, sum(l_extendedprice * (1 - l_discount)) as revenue, 
+  c_acctbal, n_name, c_address, c_phone, c_comment
+from
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey and o.o_orderdate >= '1993-10-01' and o.o_orderdate < '1994-01-01'
+  join nation n 
+  on 
+    c.c_nationkey = n.n_nationkey
+  join lineitem l 
+  on 
+    l.l_orderkey = o.o_orderkey and l.l_returnflag = 'R'
+group by c_custkey, c_name, c_acctbal, c_phone, n_name, c_address, c_comment 
+order by revenue desc 
+limit 20;
+
+DROP TABLE lineitem;
+DROP TABLE orders;
+DROP TABLE customer;
+DROP TABLE nation;
+DROP TABLE q10_returned_item;
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q11_important_stock.hive b/hivesterix/hivesterix-dist/resource/tpch100/q11_important_stock.hive
new file mode 100644
index 0000000..271b614
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q11_important_stock.hive
@@ -0,0 +1,46 @@
+-- create tables and load data
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+
+-- create the target table
+create table q11_important_stock(ps_partkey INT, value DOUBLE);
+create table q11_part_tmp(ps_partkey int, part_value double);
+create table q11_sum_tmp(total_value double);
+
+-- the query
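+-- stage 1: stock value per part (ps_supplycost * ps_availqty) held by GERMANY suppliers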
+insert overwrite table q11_part_tmp
+select 
+  ps_partkey, sum(ps_supplycost * ps_availqty) as part_value 
+from
+  nation n join supplier s 
+  on 
+    s.s_nationkey = n.n_nationkey and n.n_name = 'GERMANY'
+  join partsupp ps 
+  on 
+    ps.ps_suppkey = s.s_suppkey
+group by ps_partkey;
+
+insert overwrite table q11_sum_tmp
+select 
+  sum(part_value) as total_value
+from 
+  q11_part_tmp;
+
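+-- stage 3: the join below has no condition, so the single q11_sum_tmp row is
+-- replicated against every part, standing in for a scalar subquery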
+insert overwrite table q11_important_stock
+select 
+  ps_partkey, part_value as value
+from
+  (
+    select ps_partkey, part_value, total_value
+    from q11_part_tmp join q11_sum_tmp
+  ) a
+where part_value > total_value * 0.0001
+order by value desc;
+
+DROP TABLE partsupp;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q11_important_stock;
+DROP TABLE q11_part_tmp;
+DROP TABLE q11_sum_tmp;
\ No newline at end of file
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q12_shipping.hive b/hivesterix/hivesterix-dist/resource/tpch100/q12_shipping.hive
new file mode 100644
index 0000000..cd5c8aa
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q12_shipping.hive
@@ -0,0 +1,42 @@
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+
+-- create the result table
+create table q12_shipping(l_shipmode string, high_line_count double, low_line_count double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
+-- the query
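+-- per ship mode (MAIL, SHIP), count late lines on high- vs. low-priority orders received in 1994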
+insert overwrite table q12_shipping
+select 
+  l_shipmode,
+  sum(case
+    when o_orderpriority = '1-URGENT'
+         or o_orderpriority = '2-HIGH'
+    then 1
+    else 0
+    end
+  ) as high_line_count,
+  sum(case
+    when o_orderpriority <> '1-URGENT'
+         and o_orderpriority <> '2-HIGH'
+    then 1
+    else 0
+    end
+  ) as low_line_count
+from
+  orders o join lineitem l
+  on
+    o.o_orderkey = l.l_orderkey and l.l_commitdate < l.l_receiptdate
+    and l.l_shipdate < l.l_commitdate and l.l_receiptdate >= '1994-01-01'
+    and l.l_receiptdate < '1995-01-01'
+where
+  l.l_shipmode = 'MAIL' or l.l_shipmode = 'SHIP'
+group by l_shipmode
+order by l_shipmode;
+
+DROP TABLE lineitem;
+DROP TABLE orders;
+DROP TABLE q12_shipping;
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q13_customer_distribution.hive b/hivesterix/hivesterix-dist/resource/tpch100/q13_customer_distribution.hive
new file mode 100644
index 0000000..dc7f832
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q13_customer_distribution.hive
@@ -0,0 +1,26 @@
+-- create the tables and load the data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+
+-- create the result table
+create table q13_customer_distribution (c_count int, custdist int);
+
+-- the query
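+-- the left outer join keeps customers with no matching orders, so c_count can be 0;
+-- orders whose comments mention special requests are excluded, and the outer
+-- query then histograms customers by order count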
+insert overwrite table q13_customer_distribution
+select 
+  c_count, count(1) as custdist
+from 
+  (select 
+     c_custkey, count(o_orderkey) as c_count
+   from 
+     customer c left outer join orders o 
+     on 
+       c.c_custkey = o.o_custkey and not o.o_comment like '%special%requests%'
+   group by c_custkey
+   ) c_orders
+group by c_count
+order by custdist desc, c_count desc;
+
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE q13_customer_distribution;
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q14_promotion_effect.hive b/hivesterix/hivesterix-dist/resource/tpch100/q14_promotion_effect.hive
new file mode 100644
index 0000000..ca6d6b2
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q14_promotion_effect.hive
@@ -0,0 +1,27 @@
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the result table
+create table q14_promotion_effect(promo_revenue double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
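+-- percentage of September 1995 revenue that came from promotional ('PROMO%') parts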
+insert overwrite table q14_promotion_effect
+select 
+  100.00 * sum(case
+               when p_type like 'PROMO%'
+               then l_extendedprice*(1-l_discount)
+               else 0.0
+               end
+  ) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue
+from 
+  part p join lineitem l 
+  on 
+    l.l_partkey = p.p_partkey and l.l_shipdate >= '1995-09-01' and l.l_shipdate < '1995-10-01';
+
+DROP TABLE lineitem;
+DROP TABLE part;
+DROP TABLE q14_promotion_effect;
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q15_top_supplier.hive b/hivesterix/hivesterix-dist/resource/tpch100/q15_top_supplier.hive
new file mode 100644
index 0000000..d3d73c3
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q15_top_supplier.hive
@@ -0,0 +1,44 @@
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+
+-- create result tables
+create table revenue(supplier_no int, total_revenue double); 
+create table max_revenue(max_revenue double); 
+create table q15_top_supplier(s_suppkey int, s_name string, s_address string, s_phone string, total_revenue double);
+
+set mapred.min.split.size=536870912;
+
+-- the query
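+-- stage 1: revenue per supplier for 1996-Q1; stages 2 and 3 pick the maximum
+-- and join back to recover the top supplier(s), in place of a scalar subquery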
+insert overwrite table revenue
+select 
+  l_suppkey as supplier_no, sum(l_extendedprice * (1 - l_discount)) as total_revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1996-01-01' and l_shipdate < '1996-04-01'
+group by l_suppkey;
+
+insert overwrite table max_revenue
+select 
+  max(total_revenue)
+from 
+  revenue;
+
+insert overwrite table q15_top_supplier
+select 
+  s_suppkey, s_name, s_address, s_phone, total_revenue
+from supplier s join revenue r 
+  on 
+    s.s_suppkey = r.supplier_no
+  join max_revenue m 
+  on 
+    r.total_revenue = m.max_revenue
+order by s_suppkey;
+
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE revenue;
+DROP TABLE max_revenue;
+DROP TABLE q15_top_supplier;
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q16_parts_supplier_relationship.hive b/hivesterix/hivesterix-dist/resource/tpch100/q16_parts_supplier_relationship.hive
new file mode 100644
index 0000000..b551581
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q16_parts_supplier_relationship.hive
@@ -0,0 +1,52 @@
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+
+-- create the result table
+create table q16_parts_supplier_relationship(p_brand string, p_type string, p_size int, supplier_cnt int);
+create table q16_tmp(p_brand string, p_type string, p_size int, ps_suppkey int);
+create table supplier_tmp(s_suppkey int);
+
+-- the query
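+-- stage 1: suppliers without customer complaints; the NOT LIKE filter stands in
+-- for the NOT IN subquery of TPC-H Q16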
+insert overwrite table supplier_tmp
+select 
+  s_suppkey
+from 
+  supplier
+where 
+  not s_comment like '%Customer%Complaints%';
+
+insert overwrite table q16_tmp
+select 
+  p_brand, p_type, p_size, ps_suppkey
+from 
+  partsupp ps join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_brand <> 'Brand#45' 
+    and not p.p_type like 'MEDIUM POLISHED%'
+  join supplier_tmp s 
+  on 
+    ps.ps_suppkey = s.s_suppkey;
+
+insert overwrite table q16_parts_supplier_relationship
+select 
+  p_brand, p_type, p_size, count(distinct ps_suppkey) as supplier_cnt
+from 
+  (select 
+     * 
+   from
+     q16_tmp 
+   where p_size = 49 or p_size = 14 or p_size = 23 or
+         p_size = 45 or p_size = 19 or p_size = 3 or
+         p_size = 36 or p_size = 9
+  ) q16_all
+group by p_brand, p_type, p_size
+order by supplier_cnt desc, p_brand, p_type, p_size;
+
+DROP TABLE partsupp;
+DROP TABLE part;
+DROP TABLE supplier;
+DROP TABLE q16_parts_supplier_relationship;
+DROP TABLE q16_tmp;
+DROP TABLE supplier_tmp;
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q17_small_quantity_order_revenue.hive b/hivesterix/hivesterix-dist/resource/tpch100/q17_small_quantity_order_revenue.hive
new file mode 100644
index 0000000..14e87db
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q17_small_quantity_order_revenue.hive
@@ -0,0 +1,38 @@
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the result table
+create table q17_small_quantity_order_revenue (avg_yearly double);
+create table lineitem_tmp (t_partkey int, t_avg_quantity double);
+
+-- the query
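+-- stage 1: per-part threshold of 0.2 * avg(l_quantity), materialized so the main
+-- query can compare each Brand#23 / MED BOX line item against it with a plain join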
+insert overwrite table lineitem_tmp
+select 
+  l_partkey as t_partkey, 0.2 * avg(l_quantity) as t_avg_quantity
+from 
+  lineitem
+group by l_partkey;
+
+insert overwrite table q17_small_quantity_order_revenue
+select
+  sum(l_extendedprice) / 7.0 as avg_yearly
+from
+  (select l_quantity, l_extendedprice, t_avg_quantity from
+   lineitem_tmp t join
+     (select
+        l_quantity, l_partkey, l_extendedprice
+      from
+        part p join lineitem l
+        on
+          p.p_partkey = l.l_partkey
+          and p.p_brand = 'Brand#23'
+          and p.p_container = 'MED BOX'
+      ) l1 on l1.l_partkey = t.t_partkey
+   ) a
+where l_quantity < t_avg_quantity;
+
+DROP TABLE lineitem;
+DROP TABLE part;
+DROP TABLE q17_small_quantity_order_revenue;
+DROP TABLE lineitem_tmp;
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q18_large_volume_customer.hive b/hivesterix/hivesterix-dist/resource/tpch100/q18_large_volume_customer.hive
new file mode 100644
index 0000000..f61bd79
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q18_large_volume_customer.hive
@@ -0,0 +1,42 @@
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+
+-- create the result tables
+create table q18_tmp(l_orderkey int, t_sum_quantity double);
+create table q18_large_volume_customer(c_name string, c_custkey int, o_orderkey int, o_orderdate string, o_totalprice double, sum_quantity double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1164000000;
+
+-- the query
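+-- stage 1: total quantity per order; the main query keeps orders above 300 units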
+insert overwrite table q18_tmp
+select 
+  l_orderkey, sum(l_quantity) as t_sum_quantity
+from 
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q18_large_volume_customer
+select 
+  c_name, c_custkey, o_orderkey, o_orderdate, o_totalprice, sum(l_quantity)
+from 
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey
+  join q18_tmp t 
+  on 
+    o.o_orderkey = t.l_orderkey and t.t_sum_quantity > 300
+  join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey
+group by c_name, c_custkey, o_orderkey, o_orderdate, o_totalprice
+order by o_totalprice desc, o_orderdate
+limit 100;
+
+DROP TABLE lineitem;
+DROP TABLE orders;
+DROP TABLE customer;
+DROP TABLE q18_tmp;
+DROP TABLE q18_large_volume_customer;
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q19_discounted_revenue.hive b/hivesterix/hivesterix-dist/resource/tpch100/q19_discounted_revenue.hive
new file mode 100644
index 0000000..cb77a06
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q19_discounted_revenue.hive
@@ -0,0 +1,49 @@
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the result table
+create table q19_discounted_revenue(revenue double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
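+-- the REGEXP predicates use single-bar alternation to emulate the IN lists of TPC-H Q19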
+insert overwrite table q19_discounted_revenue
+select
+  sum(l_extendedprice * (1 - l_discount) ) as revenue
+from
+  lineitem l join part p
+  on 
+    p.p_partkey = l.l_partkey    
+where
+  (
+    p_brand = 'Brand#12'
+    and p_container REGEXP 'SM CASE|SM BOX|SM PACK|SM PKG'
+    and l_quantity >= 1 and l_quantity <= 11
+    and p_size >= 1 and p_size <= 5
+    and l_shipmode REGEXP 'AIR|AIR REG'
+    and l_shipinstruct = 'DELIVER IN PERSON'
+  )
+  or
+  (
+    p_brand = 'Brand#23'
+    and p_container REGEXP 'MED BAG|MED BOX|MED PKG|MED PACK'
+    and l_quantity >= 10 and l_quantity <= 20
+    and p_size >= 1 and p_size <= 10
+    and l_shipmode REGEXP 'AIR|AIR REG'
+    and l_shipinstruct = 'DELIVER IN PERSON'
+  )
+  or
+  (
+    p_brand = 'Brand#34'
+    and p_container REGEXP 'LG CASE|LG BOX|LG PACK|LG PKG'
+    and l_quantity >= 20 and l_quantity <= 30
+    and p_size >= 1 and p_size <= 15
+    and l_shipmode REGEXP 'AIR|AIR REG'
+    and l_shipinstruct = 'DELIVER IN PERSON'
+  );
+
+DROP TABLE lineitem;
+DROP TABLE part;
+DROP TABLE q19_discounted_revenue;
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q1_pricing_summary_report.hive b/hivesterix/hivesterix-dist/resource/tpch100/q1_pricing_summary_report.hive
new file mode 100644
index 0000000..1899b5c
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q1_pricing_summary_report.hive
@@ -0,0 +1,21 @@
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+
+-- create the target table
+CREATE TABLE q1_pricing_summary_report ( L_RETURNFLAG STRING, L_LINESTATUS STRING, SUM_QTY DOUBLE, SUM_BASE_PRICE DOUBLE, SUM_DISC_PRICE DOUBLE, SUM_CHARGE DOUBLE, AVE_QTY DOUBLE, AVE_PRICE DOUBLE, AVE_DISC DOUBLE, COUNT_ORDER INT);
+
+set mapred.min.split.size=536870912;
+
+-- the query
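+-- single-table scan: per return flag and line status, sums and averages over all
+-- line items shipped on or before 1998-09-02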
+INSERT OVERWRITE TABLE q1_pricing_summary_report 
+SELECT 
+  L_RETURNFLAG, L_LINESTATUS, SUM(L_QUANTITY), SUM(L_EXTENDEDPRICE), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)*(1+L_TAX)), AVG(L_QUANTITY), AVG(L_EXTENDEDPRICE), AVG(L_DISCOUNT), COUNT(1) 
+FROM 
+  lineitem 
+WHERE 
+  L_SHIPDATE<='1998-09-02' 
+GROUP BY L_RETURNFLAG, L_LINESTATUS 
+ORDER BY L_RETURNFLAG, L_LINESTATUS;
+
+DROP TABLE lineitem;
+DROP TABLE q1_pricing_summary_report;
\ No newline at end of file
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q20_potential_part_promotion.hive b/hivesterix/hivesterix-dist/resource/tpch100/q20_potential_part_promotion.hive
new file mode 100644
index 0000000..d254793
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q20_potential_part_promotion.hive
@@ -0,0 +1,76 @@
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+
+-- create the target table
+create table q20_tmp1(p_partkey int);
+create table q20_tmp2(l_partkey int, l_suppkey int, sum_quantity double);
+create table q20_tmp3(ps_suppkey int, ps_availqty int, sum_quantity double);
+create table q20_tmp4(ps_suppkey int);
+create table q20_potential_part_promotion(s_name string, s_address string);
+
+set mapred.min.split.size=536870912;
+
+-- the query
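+-- five stages: 'forest%' parts, half the 1994 shipped quantity per (part, supplier),
+-- the availability comparison, the qualifying suppliers, and finally the CANADA
+-- suppliers ordered by name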
+insert overwrite table q20_tmp1
+select distinct p_partkey
+from
+  part 
+where 
+  p_name like 'forest%';
+
+insert overwrite table q20_tmp2
+select 
+  l_partkey, l_suppkey, 0.5 * sum(l_quantity)
+from
+  lineitem
+where
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+group by l_partkey, l_suppkey;
+
+insert overwrite table q20_tmp3
+select 
+  ps_suppkey, ps_availqty, sum_quantity
+from  
+  partsupp ps join q20_tmp1 t1 
+  on 
+    ps.ps_partkey = t1.p_partkey
+  join q20_tmp2 t2 
+  on 
+    ps.ps_partkey = t2.l_partkey and ps.ps_suppkey = t2.l_suppkey;
+
+insert overwrite table q20_tmp4
+select 
+  ps_suppkey
+from 
+  q20_tmp3
+where 
+  ps_availqty > sum_quantity
+group by ps_suppkey;
+
+insert overwrite table q20_potential_part_promotion
+select 
+  s_name, s_address
+from 
+  supplier s join nation n
+  on
+    s.s_nationkey = n.n_nationkey
+    and n.n_name = 'CANADA'
+  join q20_tmp4 t4
+  on 
+    s.s_suppkey = t4.ps_suppkey
+order by s_name;
+
+DROP TABLE partsupp;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q20_tmp1;
+DROP TABLE q20_tmp2;
+DROP TABLE q20_tmp3;
+DROP TABLE q20_tmp4;
+DROP TABLE q20_potential_part_promotion;
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q21_suppliers_who_kept_orders_waiting.hive b/hivesterix/hivesterix-dist/resource/tpch100/q21_suppliers_who_kept_orders_waiting.hive
new file mode 100644
index 0000000..6e0344c
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q21_suppliers_who_kept_orders_waiting.hive
@@ -0,0 +1,74 @@
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create target tables
+create table q21_tmp1(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_tmp2(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_suppliers_who_kept_orders_waiting(s_name string, numwait int);
+
+-- the query
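+-- tmp1 counts distinct suppliers per order, tmp2 counts distinct late suppliers per order;
+-- the final query keeps 'F' orders that involve more than one supplier but where only this
+-- SAUDI ARABIA supplier missed the commit date, emulating the EXISTS / NOT EXISTS pair of Q21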
+insert overwrite table q21_tmp1
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q21_tmp2
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+where
+  l_receiptdate > l_commitdate
+group by l_orderkey;
+
+insert overwrite table q21_suppliers_who_kept_orders_waiting
+select
+  s_name, count(1) as numwait
+from
+  (select s_name from
+   (select s_name, t2.l_orderkey, l_suppkey, count_suppkey, max_suppkey
+    from q21_tmp2 t2 right outer join
+      (select s_name, l_orderkey, l_suppkey from
+         (select s_name, t1.l_orderkey, l_suppkey, count_suppkey, max_suppkey
+          from
+            q21_tmp1 t1 join
+            (select s_name, l_orderkey, l_suppkey
+             from 
+               orders o join
+               (select s_name, l_orderkey, l_suppkey
+                from
+                  nation n join supplier s
+                  on
+                    s.s_nationkey = n.n_nationkey
+                    and n.n_name = 'SAUDI ARABIA'
+                  join lineitem l
+                  on
+                    s.s_suppkey = l.l_suppkey
+                where
+                  l.l_receiptdate > l.l_commitdate
+                ) l1 on o.o_orderkey = l1.l_orderkey and o.o_orderstatus = 'F'
+             ) l2 on l2.l_orderkey = t1.l_orderkey
+          ) a
+          where
+           (count_suppkey > 1) or ((count_suppkey=1) and (l_suppkey <> max_suppkey))
+       ) l3 on l3.l_orderkey = t2.l_orderkey
+    ) b
+    where
+     (count_suppkey is null) or ((count_suppkey=1) and (l_suppkey = max_suppkey))
+  ) c
+group by s_name
+order by numwait desc, s_name
+limit 100;
+
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q21_tmp1;
+DROP TABLE q21_tmp2;
+DROP TABLE q21_suppliers_who_kept_orders_waiting;
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q22_global_sales_opportunity.hive b/hivesterix/hivesterix-dist/resource/tpch100/q22_global_sales_opportunity.hive
new file mode 100644
index 0000000..381aa9f
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q22_global_sales_opportunity.hive
@@ -0,0 +1,69 @@
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+
+-- create target tables
+create table q22_customer_tmp(c_acctbal double, c_custkey int, cntrycode string);
+create table q22_customer_tmp1(avg_acctbal double);
+create table q22_orders_tmp(o_custkey int);
+create table q22_global_sales_opportunity(cntrycode string, numcust int, totacctbal double);
+
+-- the query
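+-- stage 1: customers from the seven target country codes (first two digits of c_phone)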
+insert overwrite table q22_customer_tmp
+select 
+  c_acctbal, c_custkey, substr(c_phone, 1, 2) as cntrycode
+from 
+  customer
+where 
+  substr(c_phone, 1, 2) = '13' or
+  substr(c_phone, 1, 2) = '31' or
+  substr(c_phone, 1, 2) = '23' or
+  substr(c_phone, 1, 2) = '29' or
+  substr(c_phone, 1, 2) = '30' or
+  substr(c_phone, 1, 2) = '18' or
+  substr(c_phone, 1, 2) = '17';
+ 
+insert overwrite table q22_customer_tmp1
+select
+  avg(c_acctbal)
+from
+  q22_customer_tmp
+where
+  c_acctbal > 0.00;
+
+insert overwrite table q22_orders_tmp
+select 
+  o_custkey 
+from 
+  orders
+group by 
+  o_custkey;
+
+insert overwrite table q22_global_sales_opportunity
+select
+  cntrycode, count(1) as numcust, sum(c_acctbal) as totacctbal
+from
+(
+  select cntrycode, c_acctbal, avg_acctbal from
+  q22_customer_tmp1 ct1 join
+  (
+    select cntrycode, c_acctbal from
+      q22_orders_tmp ot 
+      right outer join q22_customer_tmp ct 
+      on
+        ct.c_custkey = ot.o_custkey
+    where
+      o_custkey is null
+  ) ct2
+) a
+where
+  c_acctbal > avg_acctbal
+group by cntrycode
+order by cntrycode;
+
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE q22_customer_tmp;
+DROP TABLE q22_customer_tmp1;
+DROP TABLE q22_orders_tmp;
+DROP TABLE q22_global_sales_opportunity;
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q2_minimum_cost_supplier.hive b/hivesterix/hivesterix-dist/resource/tpch100/q2_minimum_cost_supplier.hive
new file mode 100644
index 0000000..afea998
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q2_minimum_cost_supplier.hive
@@ -0,0 +1,55 @@
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/region';
+
+-- create result tables
+create table q2_minimum_cost_supplier_tmp1 (s_acctbal double, s_name string, n_name string, p_partkey int, ps_supplycost double, p_mfgr string, s_address string, s_phone string, s_comment string);
+create table q2_minimum_cost_supplier_tmp2 (p_partkey int, ps_min_supplycost double);
+create table q2_minimum_cost_supplier (s_acctbal double, s_name string, n_name string, p_partkey int, p_mfgr string, s_address string, s_phone string, s_comment string);
+
+-- the query
+insert overwrite table q2_minimum_cost_supplier_tmp1 
+select 
+  s.s_acctbal, s.s_name, n.n_name, p.p_partkey, ps.ps_supplycost, p.p_mfgr, s.s_address, s.s_phone, s.s_comment 
+from 
+  nation n join region r 
+  on 
+    n.n_regionkey = r.r_regionkey and r.r_name = 'EUROPE' 
+  join supplier s 
+  on 
+    s.s_nationkey = n.n_nationkey 
+  join partsupp ps 
+  on 
+    s.s_suppkey = ps.ps_suppkey 
+  join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_size = 15 and p.p_type like '%BRASS';
+
+insert overwrite table q2_minimum_cost_supplier_tmp2 
+select 
+  p_partkey, min(ps_supplycost) 
+from  
+  q2_minimum_cost_supplier_tmp1 
+group by p_partkey;
+
+insert overwrite table q2_minimum_cost_supplier 
+select 
+  t1.s_acctbal, t1.s_name, t1.n_name, t1.p_partkey, t1.p_mfgr, t1.s_address, t1.s_phone, t1.s_comment 
+from 
+  q2_minimum_cost_supplier_tmp1 t1 join q2_minimum_cost_supplier_tmp2 t2 
+on 
+  t1.p_partkey = t2.p_partkey and t1.ps_supplycost=t2.ps_min_supplycost 
+order by s_acctbal desc, n_name, s_name, p_partkey 
+limit 100;
+
+DROP TABLE part;
+DROP TABLE supplier;
+DROP TABLE partsupp;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE q2_minimum_cost_supplier;
+DROP TABLE q2_minimum_cost_supplier_tmp1;
+DROP TABLE q2_minimum_cost_supplier_tmp2;
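
The tmp1/tmp2 split above is the classic groupwise-minimum rewrite: first compute min(ps_supplycost) per part, then join back and keep only rows whose cost equals that minimum. A sketch of the two phases on in-memory columns (the array layout is an assumption for illustration):

```java
import java.util.HashMap;
import java.util.Map;

public class GroupwiseMinSketch {
    // phase 1: per-key minimum, analogous to q2_minimum_cost_supplier_tmp2
    public static Map<Integer, Double> minCostPerPart(int[] partKeys, double[] costs) {
        Map<Integer, Double> min = new HashMap<Integer, Double>();
        for (int i = 0; i < partKeys.length; i++) {
            Double cur = min.get(partKeys[i]);
            if (cur == null || costs[i] < cur) {
                min.put(partKeys[i], costs[i]);
            }
        }
        return min;
    }

    // phase 2: the join-back filter, analogous to t1.ps_supplycost = t2.ps_min_supplycost
    public static boolean isMinimumCostRow(Map<Integer, Double> min, int partKey, double cost) {
        Double m = min.get(partKey);
        return m != null && m == cost;
    }
}
```
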
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q3_shipping_priority.hive b/hivesterix/hivesterix-dist/resource/tpch100/q3_shipping_priority.hive
new file mode 100644
index 0000000..9e82c99
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q3_shipping_priority.hive
@@ -0,0 +1,30 @@
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+
+-- create the target table
+create table q3_shipping_priority (l_orderkey int, revenue double, o_orderdate string, o_shippriority int);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+Insert overwrite table q3_shipping_priority
+select
+  l_orderkey, sum(l_extendedprice*(1-l_discount)) as revenue, o_orderdate, o_shippriority
+from
+  customer c join orders o
+    on c.c_mktsegment = 'BUILDING' and c.c_custkey = o.o_custkey
+  join lineitem l
+    on l.l_orderkey = o.o_orderkey
+where
+  o_orderdate < '1995-03-15' and l_shipdate > '1995-03-15'
+group by l_orderkey, o_orderdate, o_shippriority
+order by revenue desc, o_orderdate
+limit 10;
+
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE customer;
+DROP TABLE q3_shipping_priority;
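
The ORDER BY revenue DESC ... LIMIT 10 tail is a top-k selection. A hedged one-pass sketch with a size-bounded min-heap; the engine itself runs this as a distributed sort followed by a limit, so the heap is only an illustration of the semantics:

```java
import java.util.PriorityQueue;

public class TopKSketch {
    public static PriorityQueue<Double> topK(double[] revenues, int k) {
        PriorityQueue<Double> heap = new PriorityQueue<Double>(k); // min-heap of the current top k
        for (double r : revenues) {
            if (heap.size() < k) {
                heap.offer(r);
            } else if (r > heap.peek()) {
                heap.poll(); // evict the smallest of the current top k
                heap.offer(r);
            }
        }
        return heap; // the k largest revenues
    }
}
```
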
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q4_order_priority.hive b/hivesterix/hivesterix-dist/resource/tpch100/q4_order_priority.hive
new file mode 100644
index 0000000..decc493
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q4_order_priority.hive
@@ -0,0 +1,30 @@
+-- create tables and load data
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+
+-- create the target table
+CREATE TABLE q4_order_priority_tmp (O_ORDERKEY INT);
+CREATE TABLE q4_order_priority (O_ORDERPRIORITY STRING, ORDER_COUNT INT);
+
+set mapred.min.split.size=536870912;
+-- the query
+INSERT OVERWRITE TABLE q4_order_priority_tmp 
+select 
+  DISTINCT l_orderkey 
+from 
+  lineitem 
+where 
+  l_commitdate < l_receiptdate;
+INSERT OVERWRITE TABLE q4_order_priority 
+select o_orderpriority, count(1) as order_count 
+from 
+  orders o join q4_order_priority_tmp t 
+  on 
+    o.o_orderkey = t.o_orderkey and o.o_orderdate >= '1993-07-01' and o.o_orderdate < '1993-10-01' 
+group by o_orderpriority 
+order by o_orderpriority;
+
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE q4_order_priority_tmp;
+DROP TABLE q4_order_priority;
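
The DISTINCT temp table joined back to orders is a semi-join, the mirror image of the q22 anti-join sketched earlier: membership in the key set qualifies a row instead of disqualifying it. A minimal sketch with hypothetical names:

```java
import java.util.HashSet;
import java.util.Set;

public class SemiJoinSketch {
    // DISTINCT in the temp table guarantees each order key occurs at most once
    public static Set<Integer> distinctKeys(int[] orderKeys) {
        Set<Integer> keys = new HashSet<Integer>();
        for (int k : orderKeys) {
            keys.add(k);
        }
        return keys;
    }

    public static boolean qualifies(Set<Integer> lateOrderKeys, int orderKey) {
        return lateOrderKeys.contains(orderKey); // the join-back test
    }
}
```
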
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q5_local_supplier_volume.hive b/hivesterix/hivesterix-dist/resource/tpch100/q5_local_supplier_volume.hive
new file mode 100644
index 0000000..bd10d75
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q5_local_supplier_volume.hive
@@ -0,0 +1,41 @@
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/region';
+
+-- create the target table
+create table q5_local_supplier_volume (N_NAME STRING, REVENUE DOUBLE);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table q5_local_supplier_volume 
+select 
+  n_name, sum(l_extendedprice * (1 - l_discount)) as revenue 
+from
+  customer c join
+    ( select n_name, l_extendedprice, l_discount, s_nationkey, o_custkey from orders o join
+      ( select n_name, l_extendedprice, l_discount, l_orderkey, s_nationkey from lineitem l join
+        ( select n_name, s_suppkey, s_nationkey from supplier s join
+          ( select n_name, n_nationkey 
+            from nation n join region r 
+            on n.n_regionkey = r.r_regionkey and r.r_name = 'ASIA'
+          ) n1 on s.s_nationkey = n1.n_nationkey
+        ) s1 on l.l_suppkey = s1.s_suppkey
+      ) l1 on l1.l_orderkey = o.o_orderkey and o.o_orderdate >= '1994-01-01' 
+              and o.o_orderdate < '1995-01-01'
+  ) o1 
+  on c.c_nationkey = o1.s_nationkey and c.c_custkey = o1.o_custkey
+group by n_name 
+order by revenue desc;
+
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE q5_local_supplier_volume;
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q6_forecast_revenue_change.hive b/hivesterix/hivesterix-dist/resource/tpch100/q6_forecast_revenue_change.hive
new file mode 100644
index 0000000..4840fb0
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q6_forecast_revenue_change.hive
@@ -0,0 +1,20 @@
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+
+-- create the target table
+create table q6_forecast_revenue_change (revenue double);
+
+-- the query
+insert overwrite table q6_forecast_revenue_change 
+select 
+  sum(l_extendedprice*l_discount) as revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+  and l_discount >= 0.05 and l_discount <= 0.07
+  and l_quantity < 24;
+
+DROP TABLE lineitem;
+DROP TABLE q6_forecast_revenue_change;
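
Q6 is a single filter-and-sum pass over lineitem. A sketch of the same predicate and aggregate over plain arrays (the columnar layout is assumed for illustration; comparing l_shipdate as a string works because it is stored as 'YYYY-MM-DD'):

```java
public class Q6RevenueSketch {
    public static double forecastRevenue(String[] shipDates, double[] discounts, double[] extPrices,
            double[] quantities) {
        double revenue = 0.0;
        for (int i = 0; i < shipDates.length; i++) {
            if (shipDates[i].compareTo("1994-01-01") >= 0 && shipDates[i].compareTo("1995-01-01") < 0
                    && discounts[i] >= 0.05 && discounts[i] <= 0.07 && quantities[i] < 24) {
                revenue += extPrices[i] * discounts[i]; // sum(l_extendedprice * l_discount)
            }
        }
        return revenue;
    }
}
```
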
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q7_volume_shipping.hive b/hivesterix/hivesterix-dist/resource/tpch100/q7_volume_shipping.hive
new file mode 100644
index 0000000..dd6b416
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q7_volume_shipping.hive
@@ -0,0 +1,70 @@
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create the target table
+create table q7_volume_shipping (supp_nation string, cust_nation string, l_year int, revenue double);
+create table q7_volume_shipping_tmp(supp_nation string, cust_nation string, s_nationkey int, c_nationkey int);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
+-- the query
+insert overwrite table q7_volume_shipping_tmp
+select 
+  * 
+from
+  (
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey,
+      n2.n_nationkey as c_nationkey
+    from 
+      nation n1 join nation n2 
+      on 
+        n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY'
+    UNION ALL
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey, 
+      n2.n_nationkey as c_nationkey
+    from 
+      nation n1 join nation n2 
+      on 
+        n2.n_name = 'FRANCE' and n1.n_name = 'GERMANY'
+  ) a;
+
+insert overwrite table q7_volume_shipping 
+select 
+  supp_nation, cust_nation, l_year, sum(volume) as revenue
+from 
+  (
+    select
+      supp_nation, cust_nation, year(l_shipdate) as l_year, 
+      l_extendedprice * (1 - l_discount) as volume
+    from
+      q7_volume_shipping_tmp t join
+        (select l_shipdate, l_extendedprice, l_discount, c_nationkey, s_nationkey 
+         from supplier s join
+           (select l_shipdate, l_extendedprice, l_discount, l_suppkey, c_nationkey 
+            from customer c join
+              (select l_shipdate, l_extendedprice, l_discount, l_suppkey, o_custkey 
+               from orders o join lineitem l 
+               on 
+                 o.o_orderkey = l.l_orderkey and l.l_shipdate >= '1995-01-01' 
+                 and l.l_shipdate <= '1996-12-31'
+               ) l1 on c.c_custkey = l1.o_custkey
+            ) l2 on s.s_suppkey = l2.l_suppkey
+         ) l3 on l3.c_nationkey = t.c_nationkey and l3.s_nationkey = t.s_nationkey
+   ) shipping
+group by supp_nation, cust_nation, l_year
+order by supp_nation, cust_nation, l_year;
+
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE q7_volume_shipping;
+DROP TABLE q7_volume_shipping_tmp;
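
The final aggregate groups on the composite key (supp_nation, cust_nation, l_year). A sketch of composite-key grouping using a concatenated string key; the engine hashes binary keys instead, so the string encoding here is purely illustrative:

```java
import java.util.HashMap;
import java.util.Map;

public class CompositeKeyGroupBySketch {
    public static Map<String, Double> sumVolume(String[] suppNation, String[] custNation, int[] year,
            double[] volume) {
        Map<String, Double> revenue = new HashMap<String, Double>();
        for (int i = 0; i < volume.length; i++) {
            String key = suppNation[i] + "|" + custNation[i] + "|" + year[i];
            Double cur = revenue.get(key);
            revenue.put(key, cur == null ? volume[i] : cur + volume[i]);
        }
        return revenue;
    }
}
```
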
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q8_national_market_share.hive b/hivesterix/hivesterix-dist/resource/tpch100/q8_national_market_share.hive
new file mode 100644
index 0000000..72d8b69
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q8_national_market_share.hive
@@ -0,0 +1,55 @@
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/region';
+
+-- create the result table
+create table q8_national_market_share(o_year string, mkt_share double);
+
+-- the query
+insert overwrite table q8_national_market_share 
+select 
+  o_year, sum(case when nation = 'BRAZIL' then volume else 0.0 end) / sum(volume) as mkt_share
+from 
+  (
+    select 
+      year(o_orderdate) as o_year, l_extendedprice * (1-l_discount) as volume, 
+      n2.n_name as nation
+    from
+      nation n2 join
+        (select o_orderdate, l_discount, l_extendedprice, s_nationkey 
+         from supplier s join
+          (select o_orderdate, l_discount, l_extendedprice, l_suppkey 
+           from part p join
+             (select o_orderdate, l_partkey, l_discount, l_extendedprice, l_suppkey 
+              from lineitem l join
+                (select o_orderdate, o_orderkey 
+                 from orders o join
+                   (select c.c_custkey 
+                    from customer c join
+                      (select n1.n_nationkey 
+                       from nation n1 join region r
+                       on n1.n_regionkey = r.r_regionkey and r.r_name = 'AMERICA'
+                       ) n11 on c.c_nationkey = n11.n_nationkey
+                    ) c1 on c1.c_custkey = o.o_custkey
+                 ) o1 on l.l_orderkey = o1.o_orderkey and o1.o_orderdate >= '1995-01-01' 
+                         and o1.o_orderdate < '1996-12-31'
+              ) l1 on p.p_partkey = l1.l_partkey and p.p_type = 'ECONOMY ANODIZED STEEL'
+           ) p1 on s.s_suppkey = p1.l_suppkey
+        ) s1 on s1.s_nationkey = n2.n_nationkey
+  ) all_nation
+group by o_year
+order by o_year;
+
+DROP TABLE customer;
+DROP TABLE orders;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE nation;
+DROP TABLE region;
+DROP TABLE part;
+DROP TABLE q8_national_market_share;
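
mkt_share is a conditional aggregate: one accumulator sums only BRAZIL volume, the other sums all volume, and the ratio is taken per year. A minimal single-group sketch of the two accumulators:

```java
public class MarketShareSketch {
    public static double brazilShare(String[] nations, double[] volumes) {
        double brazilVolume = 0.0;
        double totalVolume = 0.0;
        for (int i = 0; i < nations.length; i++) {
            if ("BRAZIL".equals(nations[i])) {
                brazilVolume += volumes[i]; // case when nation = 'BRAZIL' then volume else 0.0 end
            }
            totalVolume += volumes[i];
        }
        return brazilVolume / totalVolume;
    }
}
```
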
diff --git a/hivesterix/hivesterix-dist/resource/tpch100/q9_product_type_profit.hive b/hivesterix/hivesterix-dist/resource/tpch100/q9_product_type_profit.hive
new file mode 100644
index 0000000..2519475
--- /dev/null
+++ b/hivesterix/hivesterix-dist/resource/tpch100/q9_product_type_profit.hive
@@ -0,0 +1,50 @@
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/part';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/100/nation';
+
+-- create the result table
+create table q9_product_type_profit (nation string, o_year string, sum_profit double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+insert overwrite table q9_product_type_profit
+select 
+  nation, o_year, sum(amount) as sum_profit
+from 
+  (
+    select 
+      n_name as nation, year(o_orderdate) as o_year, 
+      l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount
+    from
+      orders o join
+      (select l_extendedprice, l_discount, l_quantity, l_orderkey, n_name, ps_supplycost 
+       from part p join
+         (select l_extendedprice, l_discount, l_quantity, l_partkey, l_orderkey, 
+                 n_name, ps_supplycost 
+          from partsupp ps join
+            (select l_suppkey, l_extendedprice, l_discount, l_quantity, l_partkey, 
+                    l_orderkey, n_name 
+             from
+               (select s_suppkey, n_name 
+                from nation n join supplier s on n.n_nationkey = s.s_nationkey
+               ) s1 join lineitem l on s1.s_suppkey = l.l_suppkey
+            ) l1 on ps.ps_suppkey = l1.l_suppkey and ps.ps_partkey = l1.l_partkey
+         ) l2 on p.p_name like '%green%' and p.p_partkey = l2.l_partkey
+     ) l3 on o.o_orderkey = l3.l_orderkey
+  ) profit
+group by nation, o_year
+order by nation, o_year desc;
+
+DROP TABLE part;
+DROP TABLE lineitem;
+DROP TABLE supplier;
+DROP TABLE orders;
+DROP TABLE partsupp;
+DROP TABLE nation;
+DROP TABLE q9_product_type_profit;
diff --git a/hivesterix/hivesterix-dist/src/main/assembly/binary-assembly.xml b/hivesterix/hivesterix-dist/src/main/assembly/binary-assembly.xml
new file mode 100755
index 0000000..de3757f
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/assembly/binary-assembly.xml
@@ -0,0 +1,26 @@
+<assembly>
+	<id>binary-assembly</id>
+	<formats>
+		<format>zip</format>
+		<format>dir</format>
+	</formats>
+	<includeBaseDirectory>false</includeBaseDirectory>
+	<fileSets>
+		<fileSet>
+			<directory>target/appassembler/bin</directory>
+			<outputDirectory>bin</outputDirectory>
+			<fileMode>0755</fileMode>
+		</fileSet>
+		<fileSet>
+			<directory>target/appassembler/lib</directory>
+			<outputDirectory>lib</outputDirectory>
+		</fileSet>
+		<fileSet>
+			<directory>target</directory>
+			<outputDirectory>lib</outputDirectory>
+			<includes>
+				<include>*.jar</include>
+			</includes>
+		</fileSet>
+	</fileSets>
+</assembly>
diff --git a/hivesterix/hivesterix-dist/src/main/java/edu/uci/ics/hivesterix/runtime/exec/HyracksExecutionEngine.java b/hivesterix/hivesterix-dist/src/main/java/edu/uci/ics/hivesterix/runtime/exec/HyracksExecutionEngine.java
new file mode 100644
index 0000000..379737f
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/java/edu/uci/ics/hivesterix/runtime/exec/HyracksExecutionEngine.java
@@ -0,0 +1,596 @@
+package edu.uci.ics.hivesterix.runtime.exec;
+
+import java.io.BufferedReader;
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.io.Serializable;
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.ConditionalTask;
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
+import org.apache.hadoop.hive.ql.exec.MapRedTask;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
+import org.apache.hadoop.hive.ql.plan.FetchWork;
+import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
+import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
+import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.TableScanDesc;
+
+import edu.uci.ics.hivesterix.common.config.ConfUtil;
+import edu.uci.ics.hivesterix.logical.expression.HiveExpressionTypeComputer;
+import edu.uci.ics.hivesterix.logical.expression.HiveMergeAggregationExpressionFactory;
+import edu.uci.ics.hivesterix.logical.expression.HiveNullableTypeComputer;
+import edu.uci.ics.hivesterix.logical.expression.HivePartialAggregationTypeComputer;
+import edu.uci.ics.hivesterix.logical.plan.HiveAlgebricksTranslator;
+import edu.uci.ics.hivesterix.logical.plan.HiveLogicalPlanAndMetaData;
+import edu.uci.ics.hivesterix.optimizer.rulecollections.HiveRuleCollections;
+import edu.uci.ics.hivesterix.runtime.factory.evaluator.HiveExpressionRuntimeProvider;
+import edu.uci.ics.hivesterix.runtime.factory.nullwriter.HiveNullWriterFactory;
+import edu.uci.ics.hivesterix.runtime.inspector.HiveBinaryBooleanInspectorFactory;
+import edu.uci.ics.hivesterix.runtime.inspector.HiveBinaryIntegerInspectorFactory;
+import edu.uci.ics.hivesterix.runtime.jobgen.HiveConnectorPolicyAssignmentPolicy;
+import edu.uci.ics.hivesterix.runtime.jobgen.HiveConnectorPolicyAssignmentPolicy.Policy;
+import edu.uci.ics.hivesterix.runtime.provider.HiveBinaryComparatorFactoryProvider;
+import edu.uci.ics.hivesterix.runtime.provider.HiveBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.hivesterix.runtime.provider.HiveBinaryHashFunctionFamilyProvider;
+import edu.uci.ics.hivesterix.runtime.provider.HiveNormalizedKeyComputerFactoryProvider;
+import edu.uci.ics.hivesterix.runtime.provider.HivePrinterFactoryProvider;
+import edu.uci.ics.hivesterix.runtime.provider.HiveSerializerDeserializerProvider;
+import edu.uci.ics.hivesterix.runtime.provider.HiveTypeTraitProvider;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.compiler.api.HeuristicCompilerFactoryBuilder;
+import edu.uci.ics.hyracks.algebricks.compiler.api.HeuristicCompilerFactoryBuilder.DefaultOptimizationContextFactory;
+import edu.uci.ics.hyracks.algebricks.compiler.api.ICompiler;
+import edu.uci.ics.hyracks.algebricks.compiler.api.ICompilerFactory;
+import edu.uci.ics.hyracks.algebricks.compiler.rewriter.rulecontrollers.SequentialFixpointRuleController;
+import edu.uci.ics.hyracks.algebricks.compiler.rewriter.rulecontrollers.SequentialOnceRuleController;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlanAndMetadata;
+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.LogicalOperatorPrettyPrintVisitor;
+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.PlanPrettyPrinter;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.AbstractRuleController;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+@SuppressWarnings({ "rawtypes", "unchecked" })
+public class HyracksExecutionEngine implements IExecutionEngine {
+
+    private static final Log LOG = LogFactory.getLog(HyracksExecutionEngine.class.getName());
+    private static final String clusterPropertiesPath = "conf/cluster.properties";
+    private static final String masterFilePath = "conf/master";
+
+    private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> DEFAULT_LOGICAL_REWRITES = new ArrayList<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>>();
+    private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> DEFAULT_PHYSICAL_REWRITES = new ArrayList<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>>();
+    static {
+        SequentialFixpointRuleController seqCtrlNoDfs = new SequentialFixpointRuleController(false);
+        SequentialFixpointRuleController seqCtrlFullDfs = new SequentialFixpointRuleController(true);
+        SequentialOnceRuleController seqOnceCtrl = new SequentialOnceRuleController(true);
+        DEFAULT_LOGICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
+                HiveRuleCollections.NORMALIZATION));
+        DEFAULT_LOGICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                HiveRuleCollections.COND_PUSHDOWN_AND_JOIN_INFERENCE));
+        DEFAULT_LOGICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
+                HiveRuleCollections.LOAD_FIELDS));
+        DEFAULT_LOGICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                HiveRuleCollections.OP_PUSHDOWN));
+        DEFAULT_LOGICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                HiveRuleCollections.DATA_EXCHANGE));
+        DEFAULT_LOGICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                HiveRuleCollections.CONSOLIDATION));
+
+        DEFAULT_PHYSICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                HiveRuleCollections.PHYSICAL_PLAN_REWRITES));
+        DEFAULT_PHYSICAL_REWRITES.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                HiveRuleCollections.prepareJobGenRules));
+    }
+
+    /**
+     * static configurations for compiler
+     */
+    private HeuristicCompilerFactoryBuilder builder;
+
+    /**
+     * compiler
+     */
+    private ICompiler compiler;
+
+    /**
+     * physical optimization config
+     */
+    private PhysicalOptimizationConfig physicalOptimizationConfig;
+
+    /**
+     * final ending operators
+     */
+    private List<Operator> leaveOps = new ArrayList<Operator>();
+
+    /**
+     * tasks that are already visited
+     */
+    private Map<Task<? extends Serializable>, Boolean> tasksVisited = new HashMap<Task<? extends Serializable>, Boolean>();
+
+    /**
+     * hyracks job spec
+     */
+    private JobSpecification jobSpec;
+
+    /**
+     * hive configuration
+     */
+    private HiveConf conf;
+
+    /**
+     * plan printer
+     */
+    private PrintWriter planPrinter;
+
+    /**
+     * properties
+     */
+    private Properties clusterProps;
+
+    /**
+     * the Hyracks client connection
+     */
+    private IHyracksClientConnection hcc;
+
+    public HyracksExecutionEngine(HiveConf conf) {
+        this.conf = conf;
+        init(conf);
+    }
+
+    public HyracksExecutionEngine(HiveConf conf, PrintWriter planPrinter) {
+        this.conf = conf;
+        this.planPrinter = planPrinter;
+        init(conf);
+    }
+
+    private void init(HiveConf conf) {
+        builder = new HeuristicCompilerFactoryBuilder(DefaultOptimizationContextFactory.INSTANCE);
+        builder.setLogicalRewrites(DEFAULT_LOGICAL_REWRITES);
+        builder.setPhysicalRewrites(DEFAULT_PHYSICAL_REWRITES);
+        builder.setIMergeAggregationExpressionFactory(HiveMergeAggregationExpressionFactory.INSTANCE);
+        builder.setExpressionTypeComputer(HiveExpressionTypeComputer.INSTANCE);
+        builder.setNullableTypeComputer(HiveNullableTypeComputer.INSTANCE);
+
+        long memSizeExternalGby = conf.getLong("hive.algebricks.groupby.external.memory", 268435456);
+        long memSizeExternalSort = conf.getLong("hive.algebricks.sort.memory", 536870912);
+        int frameSize = conf.getInt("hive.algebricks.framesize", 32768);
+
+        physicalOptimizationConfig = new PhysicalOptimizationConfig();
+        int frameLimitExtGby = (int) (memSizeExternalGby / frameSize);
+        physicalOptimizationConfig.setMaxFramesExternalGroupBy(frameLimitExtGby);
+        int frameLimitExtSort = (int) (memSizeExternalSort / frameSize);
+        physicalOptimizationConfig.setMaxFramesExternalSort(frameLimitExtSort);
+        builder.setPhysicalOptimizationConfig(physicalOptimizationConfig);
+    }
+
+    @Override
+    public int compileJob(List<Task<? extends Serializable>> rootTasks) {
+        // clean up
+        leaveOps.clear();
+        tasksVisited.clear();
+        jobSpec = null;
+
+        HashMap<String, PartitionDesc> aliasToPath = new HashMap<String, PartitionDesc>();
+        List<Operator> rootOps = generateRootOperatorDAG(rootTasks, aliasToPath);
+
+        // get all leave Ops
+        getLeaves(rootOps, leaveOps);
+
+        HiveAlgebricksTranslator translator = new HiveAlgebricksTranslator();
+        try {
+            translator.translate(rootOps, null, aliasToPath);
+
+            ILogicalPlan plan = translator.genLogicalPlan();
+
+            if (plan.getRoots() != null && plan.getRoots().size() > 0 && plan.getRoots().get(0).getValue() != null) {
+                translator.printOperators();
+                ILogicalPlanAndMetadata planAndMetadata = new HiveLogicalPlanAndMetaData(plan,
+                        translator.getMetadataProvider());
+
+                ICompilerFactory compilerFactory = builder.create();
+                compiler = compilerFactory.createCompiler(planAndMetadata.getPlan(),
+                        planAndMetadata.getMetadataProvider(), translator.getVariableCounter());
+
+                // run optimization and re-writing rules for Hive plan
+                compiler.optimize();
+
+                // print optimized plan
+                LogicalOperatorPrettyPrintVisitor pvisitor = new LogicalOperatorPrettyPrintVisitor();
+                StringBuilder buffer = new StringBuilder();
+                PlanPrettyPrinter.printPlan(plan, buffer, pvisitor, 0);
+                String planStr = buffer.toString();
+                System.out.println(planStr);
+
+                if (planPrinter != null)
+                    planPrinter.print(planStr);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            return 1;
+        }
+
+        return 0;
+    }
+
+    private void codeGen() throws AlgebricksException {
+        try {
+            // number of cpu cores in the cluster
+            builder.setClusterLocations(new AlgebricksAbsolutePartitionConstraint(ConfUtil.getNCs()));
+        } catch (Exception e) {
+            throw new AlgebricksException(e);
+        }
+        // builder.setClusterTopology(ConfUtil.getClusterTopology());
+        builder.setBinaryBooleanInspectorFactory(HiveBinaryBooleanInspectorFactory.INSTANCE);
+        builder.setBinaryIntegerInspectorFactory(HiveBinaryIntegerInspectorFactory.INSTANCE);
+        builder.setComparatorFactoryProvider(HiveBinaryComparatorFactoryProvider.INSTANCE);
+        builder.setExpressionRuntimeProvider(HiveExpressionRuntimeProvider.INSTANCE);
+        builder.setHashFunctionFactoryProvider(HiveBinaryHashFunctionFactoryProvider.INSTANCE);
+        builder.setPrinterProvider(HivePrinterFactoryProvider.INSTANCE);
+        builder.setSerializerDeserializerProvider(HiveSerializerDeserializerProvider.INSTANCE);
+        builder.setNullWriterFactory(HiveNullWriterFactory.INSTANCE);
+        builder.setNormalizedKeyComputerFactoryProvider(HiveNormalizedKeyComputerFactoryProvider.INSTANCE);
+        builder.setPartialAggregationTypeComputer(HivePartialAggregationTypeComputer.INSTANCE);
+        builder.setTypeTraitProvider(HiveTypeTraitProvider.INSTANCE);
+        builder.setHashFunctionFamilyProvider(HiveBinaryHashFunctionFamilyProvider.INSTANCE);
+
+        jobSpec = compiler.createJob(null);
+
+        // set the policy
+        String policyStr = conf.get("hive.hyracks.connectorpolicy");
+        if (policyStr == null)
+            policyStr = "PIPELINING";
+        Policy policyValue = Policy.valueOf(policyStr);
+        jobSpec.setConnectorPolicyAssignmentPolicy(new HiveConnectorPolicyAssignmentPolicy(policyValue));
+        jobSpec.setUseConnectorPolicyForScheduling(false);
+    }
+
+    @Override
+    public int executeJob() {
+        try {
+            codeGen();
+            executeHyracksJob(jobSpec);
+        } catch (Exception e) {
+            e.printStackTrace();
+            return 1;
+        }
+        return 0;
+    }
+
+    private List<Operator> generateRootOperatorDAG(List<Task<? extends Serializable>> rootTasks,
+            HashMap<String, PartitionDesc> aliasToPath) {
+
+        List<Operator> rootOps = new ArrayList<Operator>();
+        List<Task<? extends Serializable>> toDelete = new ArrayList<Task<? extends Serializable>>();
+        tasksVisited.clear();
+
+        for (int i = rootTasks.size() - 1; i >= 0; i--) {
+            /**
+             * list of map-reduce tasks
+             */
+            Task<? extends Serializable> task = rootTasks.get(i);
+
+            if (task instanceof MapRedTask) {
+                List<Operator> mapRootOps = articulateMapReduceOperators(task, rootOps, aliasToPath, rootTasks);
+                if (i == 0)
+                    rootOps.addAll(mapRootOps);
+                else {
+                    List<Operator> leaves = new ArrayList<Operator>();
+                    getLeaves(rootOps, leaves);
+
+                    List<Operator> mapChildren = new ArrayList<Operator>();
+                    for (Operator childMap : mapRootOps) {
+                        if (childMap instanceof TableScanOperator) {
+                            TableScanDesc topDesc = (TableScanDesc) childMap.getConf();
+                            if (topDesc == null)
+                                mapChildren.add(childMap);
+                            else {
+                                rootOps.add(childMap);
+                            }
+                        } else
+                            mapChildren.add(childMap);
+                    }
+
+                    if (mapChildren.size() > 0) {
+                        for (Operator leaf : leaves)
+                            leaf.setChildOperators(mapChildren);
+                        for (Operator child : mapChildren)
+                            child.setParentOperators(leaves);
+                    }
+                }
+
+                MapredWork mr = (MapredWork) task.getWork();
+                HashMap<String, PartitionDesc> map = mr.getAliasToPartnInfo();
+
+                addAliasToPartition(aliasToPath, map);
+                toDelete.add(task);
+            }
+        }
+
+        for (Task<? extends Serializable> task : toDelete)
+            rootTasks.remove(task);
+
+        return rootOps;
+    }
+
+    private void addAliasToPartition(HashMap<String, PartitionDesc> aliasToPath, HashMap<String, PartitionDesc> map) {
+        Iterator<String> keys = map.keySet().iterator();
+        while (keys.hasNext()) {
+            String key = keys.next();
+            PartitionDesc part = map.get(key);
+            String[] names = key.split(":");
+            for (String name : names) {
+                aliasToPath.put(name, part);
+            }
+        }
+    }
+
+    private List<Operator> articulateMapReduceOperators(Task task, List<Operator> rootOps,
+            HashMap<String, PartitionDesc> aliasToPath, List<Task<? extends Serializable>> rootTasks) {
+        // System.out.println("!"+task.getName());
+        if (!(task instanceof MapRedTask)) {
+            if (!(task instanceof ConditionalTask)) {
+                rootTasks.add(task);
+                return null;
+            } else {
+                // remove map-reduce branches in condition task
+                ConditionalTask condition = (ConditionalTask) task;
+                List<Task<? extends Serializable>> branches = condition.getListTasks();
+                for (int i = branches.size() - 1; i >= 0; i--) {
+                    Task branch = branches.get(i);
+                    if (branch instanceof MapRedTask) {
+                        return articulateMapReduceOperators(branch, rootOps, aliasToPath, rootTasks);
+                    }
+                }
+                rootTasks.add(task);
+                return null;
+            }
+        }
+
+        MapredWork mr = (MapredWork) task.getWork();
+        HashMap<String, PartitionDesc> map = mr.getAliasToPartnInfo();
+
+        // put all aliasToParitionDesc mapping into the map
+        addAliasToPartition(aliasToPath, map);
+
+        MapRedTask mrtask = (MapRedTask) task;
+        MapredWork work = (MapredWork) mrtask.getWork();
+        HashMap<String, Operator<? extends Serializable>> operators = work.getAliasToWork();
+
+        Set entries = operators.entrySet();
+        Iterator<Entry<String, Operator>> iterator = entries.iterator();
+        List<Operator> mapRootOps = new ArrayList<Operator>();
+
+        // get map root operators
+        while (iterator.hasNext()) {
+            Operator next = iterator.next().getValue();
+            if (!mapRootOps.contains(next)) {
+                // clear that only for the case of union
+                mapRootOps.add(next);
+            }
+        }
+
+        // get map local work
+        MapredLocalWork localWork = work.getMapLocalWork();
+        if (localWork != null) {
+            HashMap<String, Operator<? extends Serializable>> localOperators = localWork.getAliasToWork();
+
+            Set localEntries = localOperators.entrySet();
+            Iterator<Entry<String, Operator>> localIterator = localEntries.iterator();
+            while (localIterator.hasNext()) {
+                mapRootOps.add(localIterator.next().getValue());
+            }
+
+            HashMap<String, FetchWork> localFetch = localWork.getAliasToFetchWork();
+            Set localFetchEntries = localFetch.entrySet();
+            Iterator<Entry<String, FetchWork>> localFetchIterator = localFetchEntries.iterator();
+            while (localFetchIterator.hasNext()) {
+                Entry<String, FetchWork> fetchMap = localFetchIterator.next();
+                FetchWork fetch = fetchMap.getValue();
+                String alias = fetchMap.getKey();
+                List<PartitionDesc> dirPart = fetch.getPartDesc();
+
+                // temporary hack: put the first partitionDesc into the map
+                aliasToPath.put(alias, dirPart.get(0));
+            }
+        }
+
+        Boolean visited = tasksVisited.get(task);
+        if (visited != null && visited.booleanValue() == true) {
+            return mapRootOps;
+        }
+
+        // do that only for union operator
+        for (Operator op : mapRootOps)
+            if (op.getParentOperators() != null)
+                op.getParentOperators().clear();
+
+        List<Operator> mapLeaves = new ArrayList<Operator>();
+        downToLeaves(mapRootOps, mapLeaves);
+        List<Operator> reduceOps = new ArrayList<Operator>();
+
+        if (work.getReducer() != null)
+            reduceOps.add(work.getReducer());
+
+        for (Operator mapLeaf : mapLeaves) {
+            mapLeaf.setChildOperators(reduceOps);
+        }
+
+        for (Operator reduceOp : reduceOps) {
+            if (reduceOp != null)
+                reduceOp.setParentOperators(mapLeaves);
+        }
+
+        List<Operator> leafs = new ArrayList<Operator>();
+        if (reduceOps.size() > 0) {
+            downToLeaves(reduceOps, leafs);
+        } else {
+            leafs = mapLeaves;
+        }
+
+        List<Operator> mapChildren = new ArrayList<Operator>();
+        if (task.getChildTasks() != null && task.getChildTasks().size() > 0) {
+            for (Object child : task.getChildTasks()) {
+                List<Operator> childMapOps = articulateMapReduceOperators((Task) child, rootOps, aliasToPath, rootTasks);
+                if (childMapOps == null)
+                    continue;
+
+                for (Operator childMap : childMapOps) {
+                    if (childMap instanceof TableScanOperator) {
+                        TableScanDesc topDesc = (TableScanDesc) childMap.getConf();
+                        if (topDesc == null)
+                            mapChildren.add(childMap);
+                        else {
+                            rootOps.add(childMap);
+                        }
+                    } else {
+                        // if not table scan, add the child
+                        mapChildren.add(childMap);
+                    }
+                }
+            }
+
+            if (mapChildren.size() > 0) {
+                int i = 0;
+                for (Operator leaf : leafs) {
+                    if (leaf.getChildOperators() == null || leaf.getChildOperators().size() == 0)
+                        leaf.setChildOperators(new ArrayList<Operator>());
+                    leaf.getChildOperators().add(mapChildren.get(i));
+                    i++;
+                }
+                i = 0;
+                for (Operator child : mapChildren) {
+                    if (child.getParentOperators() == null || child.getParentOperators().size() == 0)
+                        child.setParentOperators(new ArrayList<Operator>());
+                    child.getParentOperators().add(leafs.get(i));
+                    i++;
+                }
+            }
+        }
+
+        // mark this task as visited
+        this.tasksVisited.put(task, true);
+        return mapRootOps;
+    }
+
+    /**
+     * down to leaf nodes
+     * 
+     * @param ops
+     * @param leaves
+     */
+    private void downToLeaves(List<Operator> ops, List<Operator> leaves) {
+
+        // Operator currentOp;
+        for (Operator op : ops) {
+            if (op != null && op.getChildOperators() != null && op.getChildOperators().size() > 0) {
+                downToLeaves(op.getChildOperators(), leaves);
+            } else {
+                if (op != null && leaves.indexOf(op) < 0)
+                    leaves.add(op);
+            }
+        }
+    }
+
+    private void getLeaves(List<Operator> roots, List<Operator> currentLeaves) {
+        for (Operator op : roots) {
+            List<Operator> children = op.getChildOperators();
+            if (children == null || children.size() <= 0) {
+                currentLeaves.add(op);
+            } else {
+                getLeaves(children, currentLeaves);
+            }
+        }
+    }
+
+    private void executeHyracksJob(JobSpecification job) throws Exception {
+
+        /**
+         * load the properties file if it is not loaded
+         */
+        if (clusterProps == null) {
+            clusterProps = new Properties();
+            InputStream confIn = new FileInputStream(clusterPropertiesPath);
+            clusterProps.load(confIn);
+            confIn.close();
+        }
+
+        if (hcc == null) {
+            BufferedReader ipReader = new BufferedReader(new InputStreamReader(new FileInputStream(masterFilePath)));
+            String masterNode = ipReader.readLine();
+            ipReader.close();
+
+            InetAddress[] ips = InetAddress.getAllByName(masterNode);
+            int port = Integer.parseInt(clusterProps.getProperty("CC_CLIENTPORT"));
+            for (InetAddress ip : ips) {
+                if (ip.getAddress().length <= 4) {
+                    try {
+                        hcc = new HyracksConnection(ip.getHostAddress(), port);
+                        break;
+                    } catch (Exception e) {
+                        continue;
+                    }
+                }
+            }
+        }
+
+        String applicationName = "hivesterix";
+        long start = System.currentTimeMillis();
+        JobId jobId = hcc.startJob(applicationName, job);
+        hcc.waitForCompletion(jobId);
+
+        // System.out.println("job finished: " + jobId.toString());
+        // call all leave nodes to end
+        for (Operator leaf : leaveOps) {
+            jobClose(leaf);
+        }
+
+        long end = System.currentTimeMillis();
+        System.err.println(start + " " + end + " " + (end - start));
+    }
+
+    /**
+     * mv to final directory on hdfs (not real final)
+     * 
+     * @param leaf
+     * @throws Exception
+     */
+    private void jobClose(Operator leaf) throws Exception {
+        FileSinkOperator fsOp = (FileSinkOperator) leaf;
+        FileSinkDesc desc = fsOp.getConf();
+        boolean isNativeTable = !desc.getTableInfo().isNonNative();
+        if ((conf != null) && isNativeTable) {
+            String specPath = desc.getDirName();
+            DynamicPartitionCtx dpCtx = desc.getDynPartCtx();
+            // for 0.7.0
+            fsOp.mvFileToFinalPath(specPath, conf, true, LOG, dpCtx);
+            // for 0.8.0
+            // Utilities.mvFileToFinalPath(specPath, conf, true, LOG, dpCtx,
+            // desc);
+        }
+    }
+}
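
For reference, init() above converts byte budgets into per-operator frame counts by integer division. With the default knobs this gives 268435456 / 32768 = 8192 frames for external group-by and 536870912 / 32768 = 16384 frames for external sort; a worked check, not new behavior:

```java
public class FrameBudgetCheck {
    public static void main(String[] args) {
        long memSizeExternalGby = 268435456L;  // hive.algebricks.groupby.external.memory default
        long memSizeExternalSort = 536870912L; // hive.algebricks.sort.memory default
        int frameSize = 32768;                 // hive.algebricks.framesize default

        System.out.println(memSizeExternalGby / frameSize);  // 8192
        System.out.println(memSizeExternalSort / frameSize); // 16384
    }
}
```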

diff --git a/hivesterix/hivesterix-dist/src/main/java/edu/uci/ics/hivesterix/runtime/exec/IExecutionEngine.java b/hivesterix/hivesterix-dist/src/main/java/edu/uci/ics/hivesterix/runtime/exec/IExecutionEngine.java
new file mode 100644
index 0000000..c64a39b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/java/edu/uci/ics/hivesterix/runtime/exec/IExecutionEngine.java
@@ -0,0 +1,25 @@
+package edu.uci.ics.hivesterix.runtime.exec;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Task;
+
+public interface IExecutionEngine {
+
+    /**
+     * compile the job
+     * 
+     * @param rootTasks
+     *            : Hive MapReduce plan
+     * @return 0 pass, 1 fail
+     */
+    public int compileJob(List<Task<? extends Serializable>> rootTasks);
+
+    /**
+     * execute the job with the latest compiled plan
+     * 
+     * @return 0 pass, 1 fail
+     */
+    public int executeJob();
+}
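
A hedged usage sketch of this interface: how a caller such as the patched Driver might compile and run a plan. The rootTasks list is assumed to come from Hive's semantic analyzer, the class is assumed to sit in the same package as the engine, and the return codes follow the contract documented above (0 pass, 1 fail):

```java
import java.io.Serializable;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Task;

public class EngineUsageSketch {
    public static int compileAndRun(HiveConf conf, List<Task<? extends Serializable>> rootTasks) {
        IExecutionEngine engine = new HyracksExecutionEngine(conf);
        if (engine.compileJob(rootTasks) != 0) {
            return 1; // compilation failed; a caller could fall back to plain Hive here
        }
        return engine.executeJob();
    }
}
```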

diff --git a/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/Driver.java b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/Driver.java
new file mode 100644
index 0000000..a385742
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/Driver.java
@@ -0,0 +1,1310 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql;
+
+import java.io.DataInput;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Queue;
+import java.util.Set;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Schema;
+import org.apache.hadoop.hive.ql.exec.ConditionalTask;
+import org.apache.hadoop.hive.ql.exec.ExecDriver;
+import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.MapRedTask;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.StatsTask;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.exec.TaskResult;
+import org.apache.hadoop.hive.ql.exec.TaskRunner;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
+import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
+import org.apache.hadoop.hive.ql.hooks.Hook;
+import org.apache.hadoop.hive.ql.hooks.HookContext;
+import org.apache.hadoop.hive.ql.hooks.PostExecute;
+import org.apache.hadoop.hive.ql.hooks.PreExecute;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockManager;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockManagerCtx;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockMode;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockObj;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
+import org.apache.hadoop.hive.ql.lockmgr.LockException;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.DummyPartition;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.ErrorMsg;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContextImpl;
+import org.apache.hadoop.hive.ql.parse.ParseContext;
+import org.apache.hadoop.hive.ql.parse.ParseDriver;
+import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.parse.ParseUtils;
+import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
+import org.apache.hadoop.hive.ql.plan.ConditionalResolver;
+import org.apache.hadoop.hive.ql.plan.ConditionalResolverMergeFiles;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.processors.CommandProcessor;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.mapred.ClusterStatus;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import edu.uci.ics.hivesterix.runtime.exec.HyracksExecutionEngine;
+import edu.uci.ics.hivesterix.runtime.exec.IExecutionEngine;
+
+@SuppressWarnings({ "deprecation", "unused" })
+public class Driver implements CommandProcessor {
+
+    static final private Log LOG = LogFactory.getLog(Driver.class.getName());
+    static final private LogHelper console = new LogHelper(LOG);
+
+    // hive-sterix
+    private IExecutionEngine engine;
+    private boolean hivesterix = false;
+
+    private int maxRows = 100;
+    ByteStream.Output bos = new ByteStream.Output();
+
+    private HiveConf conf;
+    private DataInput resStream;
+    private Context ctx;
+    private QueryPlan plan;
+    private Schema schema;
+    private HiveLockManager hiveLockMgr;
+
+    private String errorMessage;
+    private String SQLState;
+
+    // A limit on the number of threads that can be launched
+    private int maxthreads;
+    private final int sleeptime = 2000;
+
+    protected int tryCount = Integer.MAX_VALUE;
+
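+    /**
+     * Lazily create the lock manager if concurrency support
+     * (hive.support.concurrency) is enabled. Returns 0 on success and 12 if
+     * the configured lock manager cannot be initialized.
+     */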
+    private int checkLockManager() {
+        boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
+        if (supportConcurrency && (hiveLockMgr == null)) {
+            try {
+                setLockManager();
+            } catch (SemanticException e) {
+                errorMessage = "FAILED: Error in semantic analysis: " + e.getMessage();
+                SQLState = ErrorMsg.findSQLState(e.getMessage());
+                console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+                return (12);
+            }
+        }
+        return (0);
+    }
+
+    private void setLockManager() throws SemanticException {
+        boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
+        if (supportConcurrency) {
+            String lockMgr = conf.getVar(HiveConf.ConfVars.HIVE_LOCK_MANAGER);
+            if ((lockMgr == null) || (lockMgr.isEmpty())) {
+                throw new SemanticException(ErrorMsg.LOCKMGR_NOT_SPECIFIED.getMsg());
+            }
+
+            try {
+                hiveLockMgr = (HiveLockManager) ReflectionUtils.newInstance(conf.getClassByName(lockMgr), conf);
+                hiveLockMgr.setContext(new HiveLockManagerCtx(conf));
+            } catch (Exception e) {
+                throw new SemanticException(ErrorMsg.LOCKMGR_NOT_INITIALIZED.getMsg() + e.getMessage());
+            }
+        }
+    }
+
+    public void init() {
+        Operator.resetId();
+    }
+
+    /**
+     * Return the status information about the Map-Reduce cluster
+     */
+    public ClusterStatus getClusterStatus() throws Exception {
+        ClusterStatus cs;
+        try {
+            JobConf job = new JobConf(conf, ExecDriver.class);
+            JobClient jc = new JobClient(job);
+            cs = jc.getClusterStatus();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw e;
+        }
+        LOG.info("Returning cluster status: " + cs.toString());
+        return cs;
+    }
+
+    public Schema getSchema() {
+        return schema;
+    }
+
+    /**
+     * Get a Schema with fields represented with native Hive types
+     */
+    public static Schema getSchema(BaseSemanticAnalyzer sem, HiveConf conf) {
+        Schema schema = null;
+
+        // If we have a plan, prefer its logical result schema if it's
+        // available; otherwise, try digging out a fetch task; failing that,
+        // give up.
+        if (sem == null) {
+            // can't get any info without a plan
+        } else if (sem.getResultSchema() != null) {
+            List<FieldSchema> lst = sem.getResultSchema();
+            schema = new Schema(lst, null);
+        } else if (sem.getFetchTask() != null) {
+            FetchTask ft = sem.getFetchTask();
+            TableDesc td = ft.getTblDesc();
+            // Partitioned tables don't have a tableDesc set on the FetchTask.
+            // Instead they have a list of PartitionDesc objects, each with a
+            // table desc. Try to fetch the desc for the first partition and
+            // use its deserializer.
+            if (td == null && ft.getWork() != null && ft.getWork().getPartDesc() != null) {
+                if (ft.getWork().getPartDesc().size() > 0) {
+                    td = ft.getWork().getPartDesc().get(0).getTableDesc();
+                }
+            }
+
+            if (td == null) {
+                LOG.info("No returning schema.");
+            } else {
+                String tableName = "result";
+                List<FieldSchema> lst = null;
+                try {
+                    lst = MetaStoreUtils.getFieldsFromDeserializer(tableName, td.getDeserializer());
+                } catch (Exception e) {
+                    LOG.warn("Error getting schema: " + org.apache.hadoop.util.StringUtils.stringifyException(e));
+                }
+                if (lst != null) {
+                    schema = new Schema(lst, null);
+                }
+            }
+        }
+        if (schema == null) {
+            schema = new Schema();
+        }
+        LOG.info("Returning Hive schema: " + schema);
+        return schema;
+    }
+
+    /**
+     * Get a Schema with fields represented with Thrift DDL types
+     */
+    public Schema getThriftSchema() throws Exception {
+        Schema schema;
+        try {
+            schema = getSchema();
+            if (schema != null) {
+                List<FieldSchema> lst = schema.getFieldSchemas();
+                // Go over the schema and convert type to thrift type
+                if (lst != null) {
+                    for (FieldSchema f : lst) {
+                        f.setType(MetaStoreUtils.typeToThriftType(f.getType()));
+                    }
+                }
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw e;
+        }
+        LOG.info("Returning Thrift schema: " + schema);
+        return schema;
+    }
+
+    /**
+     * Return the maximum number of rows returned by getResults
+     */
+    public int getMaxRows() {
+        return maxRows;
+    }
+
+    /**
+     * Set the maximum number of rows returned by getResults
+     */
+    public void setMaxRows(int maxRows) {
+        this.maxRows = maxRows;
+    }
+
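+    /**
+     * Returns true if any task in the given list, or in any list of child
+     * tasks reachable from it, includes a reduce phase.
+     */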
+    public boolean hasReduceTasks(List<Task<? extends Serializable>> tasks) {
+        if (tasks == null) {
+            return false;
+        }
+
+        boolean hasReduce = false;
+        for (Task<? extends Serializable> task : tasks) {
+            if (task.hasReduce()) {
+                return true;
+            }
+
+            hasReduce = (hasReduce || hasReduceTasks(task.getChildTasks()));
+        }
+        return hasReduce;
+    }
+
+    /**
+     * for backwards compatibility with current tests
+     */
+    public Driver(HiveConf conf) {
+        this.conf = conf;
+
+        // hivesterix
+        engine = new HyracksExecutionEngine(conf);
+    }
+
+    public Driver() {
+        if (SessionState.get() != null) {
+            conf = SessionState.get().getConf();
+        }
+
+        // hivesterix
+        engine = new HyracksExecutionEngine(conf);
+    }
+
+    // hivesterix: plan printer
+    public Driver(HiveConf conf, PrintWriter planPrinter) {
+        this.conf = conf;
+        engine = new HyracksExecutionEngine(conf, planPrinter);
+    }
+
+    public void clear() {
+        this.hivesterix = false;
+    }
+
+    /**
+     * Compile a new query. Any currently-planned query associated with this
+     * Driver is discarded.
+     * 
+     * @param command
+     *            The SQL query to compile.
+     */
+    public int compile(String command) {
+        if (plan != null) {
+            close();
+            plan = null;
+        }
+
+        TaskFactory.resetId();
+
+        try {
+            command = new VariableSubstitution().substitute(conf, command);
+            ctx = new Context(conf);
+
+            ParseDriver pd = new ParseDriver();
+            ASTNode tree = pd.parse(command, ctx);
+            tree = ParseUtils.findRootNonNullToken(tree);
+
+            BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
+            List<AbstractSemanticAnalyzerHook> saHooks = getSemanticAnalyzerHooks();
+
+            // Do semantic analysis and plan generation
+            if (saHooks != null) {
+                HiveSemanticAnalyzerHookContext hookCtx = new HiveSemanticAnalyzerHookContextImpl();
+                hookCtx.setConf(conf);
+                for (AbstractSemanticAnalyzerHook hook : saHooks) {
+                    tree = hook.preAnalyze(hookCtx, tree);
+                }
+                sem.analyze(tree, ctx);
+                for (AbstractSemanticAnalyzerHook hook : saHooks) {
+                    hook.postAnalyze(hookCtx, sem.getRootTasks());
+                }
+            } else {
+                sem.analyze(tree, ctx);
+            }
+
+            LOG.info("Semantic Analysis Completed");
+
+            // validate the plan
+            sem.validate();
+
+            plan = new QueryPlan(command, sem);
+            // initialize FetchTask right here
+            if (plan.getFetchTask() != null) {
+                plan.getFetchTask().initialize(conf, plan, null);
+            }
+
+            // get the output schema
+            schema = getSchema(sem, conf);
+
+            // Test only: serialize the query plan and then deserialize it
+            if (sem instanceof SemanticAnalyzer && command.toLowerCase().indexOf("create") < 0) {
+
+                Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
+
+                String queryPlanFileName = ctx.getLocalScratchDir(true) + Path.SEPARATOR_CHAR + "queryplan.xml";
+                LOG.info("query plan = " + queryPlanFileName);
+                queryPlanFileName = new Path(queryPlanFileName).toUri().getPath();
+
+                // serialize the queryPlan
+                FileOutputStream fos = new FileOutputStream(queryPlanFileName);
+                Utilities.serializeQueryPlan(plan, fos);
+                fos.close();
+
+                // deserialize the queryPlan
+                FileInputStream fis = new FileInputStream(queryPlanFileName);
+                QueryPlan newPlan = Utilities.deserializeQueryPlan(fis, conf);
+                fis.close();
+
+                // Use the deserialized plan
+                plan = newPlan;
+            }
+
+            // initialize FetchTask right here
+            if (plan.getFetchTask() != null) {
+                plan.getFetchTask().initialize(conf, plan, null);
+            }
+
+            // do the authorization check
+            if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+                try {
+                    // doAuthorization(sem);
+                } catch (AuthorizationException authExp) {
+                    console.printError("Authorization failed:" + authExp.getMessage()
+                            + ". Use show grant to get more details.");
+                    return 403;
+                }
+            }
+
+            // hyracks run
+            if (sem instanceof SemanticAnalyzer && command.toLowerCase().indexOf("create") < 0) {
+                hivesterix = true;
+                return engine.compileJob(sem.getRootTasks());
+            }
+
+            return 0;
+        } catch (SemanticException e) {
+            errorMessage = "FAILED: Error in semantic analysis: " + e.getMessage();
+            SQLState = ErrorMsg.findSQLState(e.getMessage());
+            console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+            return (10);
+        } catch (ParseException e) {
+            errorMessage = "FAILED: Parse Error: " + e.getMessage();
+            SQLState = ErrorMsg.findSQLState(e.getMessage());
+            console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+            return (11);
+        } catch (Exception e) {
+            errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
+            SQLState = ErrorMsg.findSQLState(e.getMessage());
+            console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+            return (12);
+        }
+    }
+
+    private void doAuthorization(BaseSemanticAnalyzer sem) throws HiveException, AuthorizationException {
+        HashSet<ReadEntity> inputs = sem.getInputs();
+        HashSet<WriteEntity> outputs = sem.getOutputs();
+        SessionState ss = SessionState.get();
+        HiveOperation op = ss.getHiveOperation();
+        Hive db = sem.getDb();
+        if (op != null) {
+            if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) || op.equals(HiveOperation.CREATETABLE)) {
+                ss.getAuthorizer().authorize(db.getDatabase(db.getCurrentDatabase()), null,
+                        HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
+            } else {
+                // if (op.equals(HiveOperation.IMPORT)) {
+                // ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
+                // if (!isa.existsTable()) {
+                ss.getAuthorizer().authorize(db.getDatabase(db.getCurrentDatabase()), null,
+                        HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
+                // }
+                // }
+            }
+            if (outputs != null && outputs.size() > 0) {
+                for (WriteEntity write : outputs) {
+
+                    if (write.getType() == WriteEntity.Type.PARTITION) {
+                        Partition part = db.getPartition(write.getTable(), write.getPartition().getSpec(), false);
+                        if (part != null) {
+                            ss.getAuthorizer().authorize(write.getPartition(), null, op.getOutputRequiredPrivileges());
+                            continue;
+                        }
+                    }
+
+                    if (write.getTable() != null) {
+                        ss.getAuthorizer().authorize(write.getTable(), null, op.getOutputRequiredPrivileges());
+                    }
+                }
+
+            }
+        }
+
+        if (inputs != null && inputs.size() > 0) {
+
+            Map<Table, List<String>> tab2Cols = new HashMap<Table, List<String>>();
+            Map<Partition, List<String>> part2Cols = new HashMap<Partition, List<String>>();
+
+            Map<String, Boolean> tableUsePartLevelAuth = new HashMap<String, Boolean>();
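+            // First pass over the inputs: record, per table, whether the
+            // "PARTITION_LEVEL_PRIVILEGE" table parameter is TRUE, i.e.
+            // whether authorization should be checked per partition rather
+            // than per table.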
+            for (ReadEntity read : inputs) {
+                if (read.getPartition() != null) {
+                    Table tbl = read.getTable();
+                    String tblName = tbl.getTableName();
+                    if (tableUsePartLevelAuth.get(tblName) == null) {
+                        boolean usePartLevelPriv = (tbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE") != null && ("TRUE"
+                                .equalsIgnoreCase(tbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))));
+                        if (usePartLevelPriv) {
+                            tableUsePartLevelAuth.put(tblName, Boolean.TRUE);
+                        } else {
+                            tableUsePartLevelAuth.put(tblName, Boolean.FALSE);
+                        }
+                    }
+                }
+            }
+
+            if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) || op.equals(HiveOperation.QUERY)) {
+                SemanticAnalyzer querySem = (SemanticAnalyzer) sem;
+                ParseContext parseCtx = querySem.getParseContext();
+                Map<TableScanOperator, Table> tsoTopMap = parseCtx.getTopToTable();
+
+                for (Map.Entry<String, Operator<? extends Serializable>> topOpMap : querySem.getParseContext()
+                        .getTopOps().entrySet()) {
+                    Operator<? extends Serializable> topOp = topOpMap.getValue();
+                    if (topOp instanceof TableScanOperator && tsoTopMap.containsKey(topOp)) {
+                        TableScanOperator tableScanOp = (TableScanOperator) topOp;
+                        Table tbl = tsoTopMap.get(tableScanOp);
+                        List<Integer> neededColumnIds = tableScanOp.getNeededColumnIDs();
+                        List<FieldSchema> columns = tbl.getCols();
+                        List<String> cols = new ArrayList<String>();
+                        if (neededColumnIds != null && neededColumnIds.size() > 0) {
+                            for (int i = 0; i < neededColumnIds.size(); i++) {
+                                cols.add(columns.get(neededColumnIds.get(i)).getName());
+                            }
+                        } else {
+                            for (int i = 0; i < columns.size(); i++) {
+                                cols.add(columns.get(i).getName());
+                            }
+                        }
+                        if (tbl.isPartitioned() && tableUsePartLevelAuth.get(tbl.getTableName())) {
+                            String alias_id = topOpMap.getKey();
+                            PrunedPartitionList partsList = PartitionPruner.prune(parseCtx.getTopToTable().get(topOp),
+                                    parseCtx.getOpToPartPruner().get(topOp), parseCtx.getConf(), alias_id,
+                                    parseCtx.getPrunedPartitions());
+                            Set<Partition> parts = new HashSet<Partition>();
+                            parts.addAll(partsList.getConfirmedPartns());
+                            parts.addAll(partsList.getUnknownPartns());
+                            for (Partition part : parts) {
+                                List<String> existingCols = part2Cols.get(part);
+                                if (existingCols == null) {
+                                    existingCols = new ArrayList<String>();
+                                }
+                                existingCols.addAll(cols);
+                                part2Cols.put(part, existingCols);
+                            }
+                        } else {
+                            List<String> existingCols = tab2Cols.get(tbl);
+                            if (existingCols == null) {
+                                existingCols = new ArrayList<String>();
+                            }
+                            existingCols.addAll(cols);
+                            tab2Cols.put(tbl, existingCols);
+                        }
+                    }
+                }
+            }
+
+            // cache the results for table authorization
+            Set<String> tableAuthChecked = new HashSet<String>();
+            for (ReadEntity read : inputs) {
+                Table tbl = null;
+                if (read.getPartition() != null) {
+                    tbl = read.getPartition().getTable();
+                    // use partition level authorization
+                    if (tableUsePartLevelAuth.get(tbl.getTableName())) {
+                        List<String> cols = part2Cols.get(read.getPartition());
+                        if (cols != null && cols.size() > 0) {
+                            ss.getAuthorizer().authorize(read.getPartition().getTable(), read.getPartition(), cols,
+                                    op.getInputRequiredPrivileges(), null);
+                        } else {
+                            ss.getAuthorizer().authorize(read.getPartition(), op.getInputRequiredPrivileges(), null);
+                        }
+                        continue;
+                    }
+                } else if (read.getTable() != null) {
+                    tbl = read.getTable();
+                }
+
+                // If we reach here, a table-level authorization check is
+                // needed; the table authorization may have already happened
+                // because of other partitions.
+                if (tbl != null && !tableAuthChecked.contains(tbl.getTableName())) {
+                    List<String> cols = tab2Cols.get(tbl);
+                    if (cols != null && cols.size() > 0) {
+                        ss.getAuthorizer().authorize(tbl, null, cols, op.getInputRequiredPrivileges(), null);
+                    } else {
+                        ss.getAuthorizer().authorize(tbl, op.getInputRequiredPrivileges(), null);
+                    }
+                    tableAuthChecked.add(tbl.getTableName());
+                }
+            }
+
+        }
+    }
+
+    /**
+     * @return The current query plan associated with this Driver, if any.
+     */
+    public QueryPlan getPlan() {
+        return plan;
+    }
+
+    /**
+     * @param t
+     *            The table to be locked
+     * @param p
+     *            The partition to be locked
+     * @param mode
+     *            The mode of the lock (SHARED/EXCLUSIVE) Get the list of
+     *            objects to be locked. If a partition needs to be locked (in
+     *            any mode), all its parents should also be locked in SHARED
+     *            mode.
+     **/
+    private List<HiveLockObj> getLockObjects(Table t, Partition p, HiveLockMode mode) throws SemanticException {
+        List<HiveLockObj> locks = new LinkedList<HiveLockObj>();
+
+        HiveLockObjectData lockData = new HiveLockObjectData(plan.getQueryId(), String.valueOf(System
+                .currentTimeMillis()), "IMPLICIT");
+
+        if (t != null) {
+            locks.add(new HiveLockObj(new HiveLockObject(t, lockData), mode));
+            mode = HiveLockMode.SHARED;
+            locks.add(new HiveLockObj(new HiveLockObject(t.getDbName(), lockData), mode));
+            return locks;
+        }
+
+        if (p != null) {
+            if (!(p instanceof DummyPartition)) {
+                locks.add(new HiveLockObj(new HiveLockObject(p, lockData), mode));
+            }
+
+            // All the parents are locked in shared mode
+            mode = HiveLockMode.SHARED;
+
+            // For dummy partitions, only partition name is needed
+            String name = p.getName();
+
+            if (p instanceof DummyPartition) {
+                name = p.getName().split("@")[2];
+            }
+
+            String partName = name;
+            String partialName = "";
+            String[] partns = name.split("/");
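+            // Walk the prefixes of the partition spec and lock each ancestor
+            // as a DummyPartition in SHARED mode. For a (hypothetical) real
+            // partition named "ds=2008-04-08/hr=11", len excludes the last
+            // component, so only "<db>/<table>/ds=2008-04-08" is added here;
+            // the partition itself was already locked above.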
+            int len = p instanceof DummyPartition ? partns.length : partns.length - 1;
+            for (int idx = 0; idx < len; idx++) {
+                String partn = partns[idx];
+                partialName += partn;
+                try {
+                    locks.add(new HiveLockObj(new HiveLockObject(new DummyPartition(p.getTable(), p.getTable()
+                            .getDbName() + "/" + p.getTable().getTableName() + "/" + partialName), lockData), mode));
+                    partialName += "/";
+                } catch (HiveException e) {
+                    throw new SemanticException(e.getMessage());
+                }
+            }
+
+            locks.add(new HiveLockObj(new HiveLockObject(p.getTable(), lockData), mode));
+            locks.add(new HiveLockObj(new HiveLockObject(p.getTable().getDbName(), lockData), mode));
+        }
+        return locks;
+    }
+
+    /**
+     * Acquire the read and write locks needed by the statement. The list of
+     * objects to be locked is obtained from the inputs and outputs populated
+     * by the compiler. The lock acquisition scheme is pretty simple: if all
+     * the locks cannot be obtained, error out. Deadlock is avoided by making
+     * sure that the locks are lexicographically sorted.
+     **/
+    public int acquireReadWriteLocks() {
+        try {
+            int sleepTime = conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES) * 1000;
+            int numRetries = conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_NUMRETRIES);
+
+            boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
+            if (!supportConcurrency) {
+                return 0;
+            }
+
+            List<HiveLockObj> lockObjects = new ArrayList<HiveLockObj>();
+
+            // Sort all the inputs and outputs. If a lock needs to be acquired
+            // on any partition, a read lock needs to be acquired on all its
+            // parents also.
+            for (ReadEntity input : plan.getInputs()) {
+                if (input.getType() == ReadEntity.Type.TABLE) {
+                    lockObjects.addAll(getLockObjects(input.getTable(), null, HiveLockMode.SHARED));
+                } else {
+                    lockObjects.addAll(getLockObjects(null, input.getPartition(), HiveLockMode.SHARED));
+                }
+            }
+
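+            // Outputs: a table is locked EXCLUSIVE when the write is complete
+            // (SHARED otherwise), partitions are locked EXCLUSIVE, and
+            // incomplete dummy partitions from dynamic queries only SHARED.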
+            for (WriteEntity output : plan.getOutputs()) {
+                if (output.getTyp() == WriteEntity.Type.TABLE) {
+                    lockObjects.addAll(getLockObjects(output.getTable(), null,
+                            output.isComplete() ? HiveLockMode.EXCLUSIVE : HiveLockMode.SHARED));
+                } else if (output.getTyp() == WriteEntity.Type.PARTITION) {
+                    lockObjects.addAll(getLockObjects(null, output.getPartition(), HiveLockMode.EXCLUSIVE));
+                }
+                // In case of dynamic queries, it is possible to have incomplete
+                // dummy partitions
+                else if (output.getTyp() == WriteEntity.Type.DUMMYPARTITION) {
+                    lockObjects.addAll(getLockObjects(null, output.getPartition(), HiveLockMode.SHARED));
+                }
+            }
+
+            if (lockObjects.isEmpty() && !ctx.isNeedLockMgr()) {
+                return 0;
+            }
+
+            int ret = checkLockManager();
+            if (ret != 0) {
+                return ret;
+            }
+
+            HiveLockObjectData lockData = new HiveLockObjectData(plan.getQueryId(), String.valueOf(System
+                    .currentTimeMillis()), "IMPLICIT");
+
+            // Lock the database also
+            try {
+                Hive db = Hive.get(conf);
+                lockObjects.add(new HiveLockObj(new HiveLockObject(db.getCurrentDatabase(), lockData),
+                        HiveLockMode.SHARED));
+            } catch (HiveException e) {
+                throw new SemanticException(e.getMessage());
+            }
+
+            ctx.setHiveLockMgr(hiveLockMgr);
+            List<HiveLock> hiveLocks = null;
+
+            int tryNum = 1;
+            do {
+
+                // ctx.getHiveLockMgr();
+                // hiveLocks = ctx.getHiveLockMgr().lock(lockObjects, false);
+
+                if (hiveLocks != null) {
+                    break;
+                }
+
+                tryNum++;
+                try {
+                    Thread.sleep(sleepTime);
+                } catch (InterruptedException e) {
+                }
+            } while (tryNum < numRetries);
+
+            if (hiveLocks == null) {
+                throw new SemanticException(ErrorMsg.LOCK_CANNOT_BE_ACQUIRED.getMsg());
+            } else {
+                ctx.setHiveLocks(hiveLocks);
+            }
+
+            return (0);
+        } catch (SemanticException e) {
+            errorMessage = "FAILED: Error in acquiring locks: " + e.getMessage();
+            SQLState = ErrorMsg.findSQLState(e.getMessage());
+            console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+            return (10);
+        } catch (Exception e) {
+            errorMessage = "FAILED: Error in acquiring locks: " + e.getMessage();
+            SQLState = ErrorMsg.findSQLState(e.getMessage());
+            console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+            return (10);
+        }
+    }
+
+    /**
+     * Release all the locks acquired implicitly by the statement. Note that the
+     * locks acquired with 'keepAlive' set to True are not released.
+     **/
+    private void releaseLocks() {
+        if (ctx != null && ctx.getHiveLockMgr() != null) {
+            try {
+                ctx.getHiveLockMgr().close();
+                ctx.setHiveLocks(null);
+            } catch (LockException e) {
+            }
+        }
+    }
+
+    /**
+     * @param hiveLocks
+     *            list of hive locks to be released Release all the locks
+     *            specified. If some of the locks have already been released,
+     *            ignore them
+     **/
+    private void releaseLocks(List<HiveLock> hiveLocks) {
+        if (hiveLocks != null) {
+            ctx.getHiveLockMgr().releaseLocks(hiveLocks);
+        }
+        ctx.setHiveLocks(null);
+    }
+
+    public CommandProcessorResponse run(String command) {
+        errorMessage = null;
+        SQLState = null;
+
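+        // Pipeline: compile the command, (optionally) acquire locks, then
+        // execute. In this hivesterix build the lock acquisition/release
+        // calls are commented out, so only compile() and execute() run.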
+        int ret = compile(command);
+        if (ret != 0) {
+            // releaseLocks(ctx.getHiveLocks());
+            return new CommandProcessorResponse(ret, errorMessage, SQLState);
+        }
+
+        // ret = acquireReadWriteLocks();
+        if (ret != 0) {
+            // releaseLocks(ctx.getHiveLocks());
+            return new CommandProcessorResponse(ret, errorMessage, SQLState);
+        }
+
+        ret = execute();
+        if (ret != 0) {
+            // releaseLocks(ctx.getHiveLocks());
+            return new CommandProcessorResponse(ret, errorMessage, SQLState);
+        }
+
+        // releaseLocks(ctx.getHiveLocks());
+        return new CommandProcessorResponse(ret);
+    }
+
+    private List<AbstractSemanticAnalyzerHook> getSemanticAnalyzerHooks() throws Exception {
+        ArrayList<AbstractSemanticAnalyzerHook> saHooks = new ArrayList<AbstractSemanticAnalyzerHook>();
+        String pestr = conf.getVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK);
+        if (pestr == null) {
+            return saHooks;
+        }
+        pestr = pestr.trim();
+        if (pestr.equals("")) {
+            return saHooks;
+        }
+
+        String[] peClasses = pestr.split(",");
+
+        for (String peClass : peClasses) {
+            try {
+                AbstractSemanticAnalyzerHook hook = HiveUtils.getSemanticAnalyzerHook(conf, peClass);
+                saHooks.add(hook);
+            } catch (HiveException e) {
+                console.printError("Pre Exec Hook Class not found:" + e.getMessage());
+                throw e;
+            }
+        }
+
+        return saHooks;
+    }
+
+    private List<Hook> getPreExecHooks() throws Exception {
+        ArrayList<Hook> pehooks = new ArrayList<Hook>();
+        String pestr = conf.getVar(HiveConf.ConfVars.PREEXECHOOKS);
+        pestr = pestr.trim();
+        if (pestr.equals("")) {
+            return pehooks;
+        }
+
+        String[] peClasses = pestr.split(",");
+
+        for (String peClass : peClasses) {
+            try {
+                pehooks.add((Hook) Class.forName(peClass.trim(), true, JavaUtils.getClassLoader()).newInstance());
+            } catch (ClassNotFoundException e) {
+                console.printError("Pre Exec Hook Class not found:" + e.getMessage());
+                throw e;
+            }
+        }
+
+        return pehooks;
+    }
+
+    private List<Hook> getPostExecHooks() throws Exception {
+        ArrayList<Hook> pehooks = new ArrayList<Hook>();
+        String pestr = conf.getVar(HiveConf.ConfVars.POSTEXECHOOKS);
+        pestr = pestr.trim();
+        if (pestr.equals("")) {
+            return pehooks;
+        }
+
+        String[] peClasses = pestr.split(",");
+
+        for (String peClass : peClasses) {
+            try {
+                pehooks.add((Hook) Class.forName(peClass.trim(), true, JavaUtils.getClassLoader()).newInstance());
+            } catch (ClassNotFoundException e) {
+                console.printError("Post Exec Hook Class not found:" + e.getMessage());
+                throw e;
+            }
+        }
+
+        return pehooks;
+    }
+
+    public int execute() {
+        // execute hivesterix plan
+        if (hivesterix) {
+            hivesterix = false;
+            int ret = engine.executeJob();
+            if (ret != 0)
+                return ret;
+        }
+
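+        // Even after the Hyracks engine has executed the compiled job, fall
+        // through to the regular task loop below: launchTask() marks
+        // MapRedTask/StatsTask instances as already finished and runs the
+        // remaining tasks sequentially.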
+        boolean noName = StringUtils.isEmpty(conf.getVar(HiveConf.ConfVars.HADOOPJOBNAME));
+        int maxlen = conf.getIntVar(HiveConf.ConfVars.HIVEJOBNAMELENGTH);
+
+        String queryId = plan.getQueryId();
+        String queryStr = plan.getQueryStr();
+
+        conf.setVar(HiveConf.ConfVars.HIVEQUERYID, queryId);
+        conf.setVar(HiveConf.ConfVars.HIVEQUERYSTRING, queryStr);
+        maxthreads = HiveConf.getIntVar(conf, HiveConf.ConfVars.EXECPARALLETHREADNUMBER);
+
+        try {
+            LOG.info("Starting command: " + queryStr);
+
+            plan.setStarted();
+
+            if (SessionState.get() != null) {
+                SessionState.get().getHiveHistory().startQuery(queryStr, conf.getVar(HiveConf.ConfVars.HIVEQUERYID));
+                SessionState.get().getHiveHistory().logPlanProgress(plan);
+            }
+            resStream = null;
+
+            HookContext hookContext = new HookContext(plan, conf);
+
+            for (Hook peh : getPreExecHooks()) {
+                if (peh instanceof ExecuteWithHookContext) {
+                    ((ExecuteWithHookContext) peh).run(hookContext);
+                } else if (peh instanceof PreExecute) {
+                    ((PreExecute) peh).run(SessionState.get(), plan.getInputs(), plan.getOutputs(), ShimLoader
+                            .getHadoopShims().getUGIForConf(conf));
+                }
+            }
+
+            int jobs = Utilities.getMRTasks(plan.getRootTasks()).size();
+            if (jobs > 0) {
+                console.printInfo("Total MapReduce jobs = " + jobs);
+            }
+            if (SessionState.get() != null) {
+                SessionState.get().getHiveHistory()
+                        .setQueryProperty(queryId, Keys.QUERY_NUM_TASKS, String.valueOf(jobs));
+                SessionState.get().getHiveHistory().setIdToTableMap(plan.getIdToTableNameMap());
+            }
+            String jobname = Utilities.abbreviate(queryStr, maxlen - 6);
+
+            // A runtime that launches runnable tasks as separate threads
+            // through TaskRunners. As soon as a task is runnable, it is put in
+            // a queue. At any time, at most maxthreads tasks can be running.
+            // The main thread polls the TaskRunners to check if they have
+            // finished.
+
+            Queue<Task<? extends Serializable>> runnable = new LinkedList<Task<? extends Serializable>>();
+            Map<TaskResult, TaskRunner> running = new HashMap<TaskResult, TaskRunner>();
+
+            DriverContext driverCxt = new DriverContext(runnable, ctx);
+
+            // Add root Tasks to runnable
+
+            for (Task<? extends Serializable> tsk : plan.getRootTasks()) {
+                driverCxt.addToRunnable(tsk);
+            }
+
+            // Loop while you either have tasks running, or tasks queued up
+
+            while (running.size() != 0 || runnable.peek() != null) {
+                // Launch up to maxthreads tasks
+                while (runnable.peek() != null && running.size() < maxthreads) {
+                    Task<? extends Serializable> tsk = runnable.remove();
+                    console.printInfo("executing task " + tsk.getName());
+                    launchTask(tsk, queryId, noName, running, jobname, jobs, driverCxt);
+                }
+
+                // poll the Tasks to see which one completed
+                TaskResult tskRes = pollTasks(running.keySet());
+                TaskRunner tskRun = running.remove(tskRes);
+                Task<? extends Serializable> tsk = tskRun.getTask();
+                hookContext.addCompleteTask(tskRun);
+
+                int exitVal = tskRes.getExitVal();
+                if (exitVal != 0) {
+                    Task<? extends Serializable> backupTask = tsk.getAndInitBackupTask();
+                    if (backupTask != null) {
+                        errorMessage = "FAILED: Execution Error, return code " + exitVal + " from "
+                                + tsk.getClass().getName();
+                        console.printError(errorMessage);
+
+                        errorMessage = "ATTEMPT: Execute BackupTask: " + backupTask.getClass().getName();
+                        console.printError(errorMessage);
+
+                        // add backup task to runnable
+                        if (DriverContext.isLaunchable(backupTask)) {
+                            driverCxt.addToRunnable(backupTask);
+                        }
+                        continue;
+
+                    } else {
+                        // TODO: This error messaging is not very informative.
+                        // Fix that.
+                        errorMessage = "FAILED: Execution Error, return code " + exitVal + " from "
+                                + tsk.getClass().getName();
+                        SQLState = "08S01";
+                        console.printError(errorMessage);
+                        if (running.size() != 0) {
+                            taskCleanup();
+                        }
+                        // in case we decided to run everything in local mode,
+                        // restore the jobtracker setting to its initial value
+                        ctx.restoreOriginalTracker();
+                        return 9;
+                    }
+                }
+
+                if (SessionState.get() != null) {
+                    SessionState.get().getHiveHistory()
+                            .setTaskProperty(queryId, tsk.getId(), Keys.TASK_RET_CODE, String.valueOf(exitVal));
+                    SessionState.get().getHiveHistory().endTask(queryId, tsk);
+                }
+
+                if (tsk.getChildTasks() != null) {
+                    for (Task<? extends Serializable> child : tsk.getChildTasks()) {
+                        // hivesterix: don't check launchable condition
+                        // if (DriverContext.isLaunchable(child)) {
+                        driverCxt.addToRunnable(child);
+                        // }
+                    }
+                }
+            }
+
+            // in case we decided to run everything in local mode, restore the
+            // jobtracker setting to its initial value
+            ctx.restoreOriginalTracker();
+
+            // Remove incomplete outputs. Some incomplete outputs may be added
+            // at the beginning, e.g. for dynamic partitions; remove them.
+            HashSet<WriteEntity> remOutputs = new HashSet<WriteEntity>();
+            for (WriteEntity output : plan.getOutputs()) {
+                if (!output.isComplete()) {
+                    remOutputs.add(output);
+                }
+            }
+
+            for (WriteEntity output : remOutputs) {
+                plan.getOutputs().remove(output);
+            }
+
+            // Get all the post execution hooks and execute them.
+            for (Hook peh : getPostExecHooks()) {
+                if (peh instanceof ExecuteWithHookContext) {
+                    ((ExecuteWithHookContext) peh).run(hookContext);
+                } else if (peh instanceof PostExecute) {
+                    ((PostExecute) peh)
+                            .run(SessionState.get(), plan.getInputs(), plan.getOutputs(),
+                                    (SessionState.get() != null ? SessionState.get().getLineageState().getLineageInfo()
+                                            : null), ShimLoader.getHadoopShims().getUGIForConf(conf));
+                }
+            }
+
+            if (SessionState.get() != null) {
+                SessionState.get().getHiveHistory().setQueryProperty(queryId, Keys.QUERY_RET_CODE, String.valueOf(0));
+                SessionState.get().getHiveHistory().printRowCount(queryId);
+            }
+        } catch (Exception e) {
+            if (SessionState.get() != null) {
+                SessionState.get().getHiveHistory().setQueryProperty(queryId, Keys.QUERY_RET_CODE, String.valueOf(12));
+            }
+            // TODO: do better with handling types of Exception here
+            errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
+            SQLState = "08S01";
+            console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+            return (12);
+        } finally {
+            if (SessionState.get() != null) {
+                SessionState.get().getHiveHistory().endQuery(queryId);
+            }
+            if (noName) {
+                conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, "");
+            }
+        }
+        plan.setDone();
+
+        if (SessionState.get() != null) {
+            try {
+                SessionState.get().getHiveHistory().logPlanProgress(plan);
+            } catch (Exception e) {
+            }
+        }
+        console.printInfo("OK");
+
+        return (0);
+    }
+
+    /**
+     * Launches a new task
+     * 
+     * @param tsk
+     *            task being launched
+     * @param queryId
+     *            Id of the query containing the task
+     * @param noName
+     *            whether the task has a name set
+     * @param running
+     *            map from taskresults to taskrunners
+     * @param jobname
+     *            name of the task, if it is a map-reduce job
+     * @param jobs
+     *            number of map-reduce jobs
+     * @param cxt
+     *            the driver context, which tracks the sequential number of the
+     *            map-reduce job being launched
+     */
+
+    public void launchTask(Task<? extends Serializable> tsk, String queryId, boolean noName,
+            Map<TaskResult, TaskRunner> running, String jobname, int jobs, DriverContext cxt) {
+
+        if (SessionState.get() != null) {
+            SessionState.get().getHiveHistory().startTask(queryId, tsk, tsk.getClass().getName());
+        }
+        if (tsk.isMapRedTask() && !(tsk instanceof ConditionalTask)) {
+            if (noName) {
+                conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, jobname + "(" + tsk.getId() + ")");
+            }
+            cxt.incCurJobNo(1);
+            console.printInfo("Launching Job " + cxt.getCurJobNo() + " out of " + jobs);
+        }
+        tsk.initialize(conf, plan, cxt);
+        TaskResult tskRes = new TaskResult();
+        TaskRunner tskRun = new TaskRunner(tsk, tskRes);
+
+        // HiveConf.getBoolVar(conf, HiveConf.ConfVars.EXECPARALLEL) &&
+        // Launch Task: hivesterix tweak
+        if (tsk instanceof MapRedTask || tsk instanceof StatsTask) {
+            // Launch it in the parallel mode, as a separate thread only for MR
+            // tasks
+            tskRes.setRunning(false);
+            tskRes.setExitVal(0);
+        } else if (tsk instanceof ConditionalTask) {
+            ConditionalTask condTask = (ConditionalTask) tsk;
+            ConditionalResolver crs = condTask.getResolver();
+            if (crs instanceof ConditionalResolverMergeFiles) {
+                tskRes.setRunning(false);
+                tskRes.setExitVal(0);
+
+                List<Task<? extends Serializable>> children = condTask.getListTasks();
+                for (Task<? extends Serializable> child : children)
+                    if (child instanceof MapRedTask)
+                        cxt.addToRunnable(child);
+            }
+        } else {
+            tskRun.runSequential();
+        }
+        running.put(tskRes, tskRun);
+        return;
+    }
+
+    /**
+     * Cleans up remaining tasks in case of failure
+     */
+
+    public void taskCleanup() {
+        // The currently existing Shutdown hooks will be automatically called,
+        // killing the map-reduce processes.
+        // The non MR processes will be killed as well.
+        System.exit(9);
+    }
+
+    /**
+     * Polls running tasks to see if a task has ended.
+     * 
+     * @param results
+     *            Set of result objects for running tasks
+     * @return The result object for any completed/failed task
+     */
+
+    public TaskResult pollTasks(Set<TaskResult> results) {
+        Iterator<TaskResult> resultIterator = results.iterator();
+        while (true) {
+            while (resultIterator.hasNext()) {
+                TaskResult tskRes = resultIterator.next();
+                if (!tskRes.isRunning()) {
+                    return tskRes;
+                }
+            }
+
+            // Nothing was found in this loop; sleep for sleeptime
+            // milliseconds (2 seconds) and restart.
+            try {
+                Thread.sleep(sleeptime);
+            } catch (InterruptedException ie) {
+                // Do Nothing
+            }
+            resultIterator = results.iterator();
+        }
+    }
+
+    public boolean getResults(ArrayList<String> res) throws IOException {
+        if (plan != null && plan.getFetchTask() != null) {
+            FetchTask ft = plan.getFetchTask();
+            ft.setMaxRows(maxRows);
+            return ft.fetch(res);
+        }
+
+        if (resStream == null) {
+            resStream = ctx.getStream();
+        }
+        if (resStream == null) {
+            return false;
+        }
+
+        int numRows = 0;
+        String row = null;
+
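+        // Read up to maxRows rows from the result stream(s): a TERMINATED
+        // status marks the end of a row, and on EOF we advance to the next
+        // stream obtained from the context.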
+        while (numRows < maxRows) {
+            if (resStream == null) {
+                if (numRows > 0) {
+                    return true;
+                } else {
+                    return false;
+                }
+            }
+
+            bos.reset();
+            Utilities.StreamStatus ss;
+            try {
+                ss = Utilities.readColumn(resStream, bos);
+                if (bos.getCount() > 0) {
+                    row = new String(bos.getData(), 0, bos.getCount(), "UTF-8");
+                } else if (ss == Utilities.StreamStatus.TERMINATED) {
+                    row = "";
+                }
+
+                if (row != null) {
+                    numRows++;
+                    res.add(row);
+                }
+            } catch (IOException e) {
+                console.printError("FAILED: Unexpected IO exception : " + e.getMessage());
+                res = null;
+                return false;
+            }
+
+            if (ss == Utilities.StreamStatus.EOF) {
+                resStream = ctx.getStream();
+            }
+        }
+        return true;
+    }
+
+    public int close() {
+        try {
+            if (plan != null) {
+                FetchTask fetchTask = plan.getFetchTask();
+                if (null != fetchTask) {
+                    try {
+                        fetchTask.clearFetch();
+                    } catch (Exception e) {
+                        LOG.debug(" Exception while clearing the Fetch task ", e);
+                    }
+                }
+            }
+            if (ctx != null) {
+                ctx.clear();
+            }
+            if (null != resStream) {
+                try {
+                    ((FSDataInputStream) resStream).close();
+                } catch (Exception e) {
+                    LOG.debug(" Exception while closing the resStream ", e);
+                }
+            }
+        } catch (Exception e) {
+            console.printError("FAILED: Hive Internal Error: " + Utilities.getNameMessage(e) + "\n"
+                    + org.apache.hadoop.util.StringUtils.stringifyException(e));
+            return 13;
+        }
+
+        return 0;
+    }
+
+    public void destroy() {
+        releaseLocks();
+    }
+
+    public org.apache.hadoop.hive.ql.plan.api.Query getQueryPlan() throws IOException {
+        return plan.getQueryPlan();
+    }
+
+    public int getTryCount() {
+        return tryCount;
+    }
+
+    public void setTryCount(int tryCount) {
+        this.tryCount = tryCount;
+    }
+}
diff --git a/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
new file mode 100644
index 0000000..0f445f4
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
@@ -0,0 +1,233 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.util.StringUtils;
+
+import edu.uci.ics.hivesterix.runtime.evaluator.BufferSerDeUtil;
+import edu.uci.ics.hivesterix.runtime.evaluator.SerializableBuffer;
+
+/**
+ * GenericUDAFAverage.
+ */
+@Description(name = "avg", value = "_FUNC_(x) - Returns the mean of a set of numbers")
+public class GenericUDAFAverage extends AbstractGenericUDAFResolver {
+
+    static final Log LOG = LogFactory.getLog(GenericUDAFAverage.class.getName());
+
+    @Override
+    public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+        if (parameters.length != 1) {
+            throw new UDFArgumentTypeException(parameters.length - 1, "Exactly one argument is expected.");
+        }
+
+        if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+            throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
+                    + parameters[0].getTypeName() + " is passed.");
+        }
+        switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+            case BYTE:
+            case SHORT:
+            case INT:
+            case LONG:
+            case FLOAT:
+            case DOUBLE:
+            case STRING:
+                return new GenericUDAFAverageEvaluator();
+            case BOOLEAN:
+            default:
+                throw new UDFArgumentTypeException(0, "Only numeric or string type arguments are accepted but "
+                        + parameters[0].getTypeName() + " is passed.");
+        }
+    }
+
+    /**
+     * GenericUDAFAverageEvaluator.
+     */
+    public static class GenericUDAFAverageEvaluator extends GenericUDAFEvaluator {
+
+        // For PARTIAL1 and COMPLETE
+        PrimitiveObjectInspector inputOI;
+
+        // For PARTIAL2 and FINAL
+        StructObjectInspector soi;
+        StructField countField;
+        StructField sumField;
+        LongObjectInspector countFieldOI;
+        DoubleObjectInspector sumFieldOI;
+
+        // For PARTIAL1 and PARTIAL2
+        Object[] partialResult;
+
+        // For FINAL and COMPLETE
+        DoubleWritable result;
+
+        @Override
+        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+            assert (parameters.length == 1);
+            super.init(m, parameters);
+
+            // init input
+            if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
+                inputOI = (PrimitiveObjectInspector) parameters[0];
+            } else {
+                soi = (StructObjectInspector) parameters[0];
+                countField = soi.getStructFieldRef("count");
+                sumField = soi.getStructFieldRef("sum");
+                countFieldOI = (LongObjectInspector) countField.getFieldObjectInspector();
+                sumFieldOI = (DoubleObjectInspector) sumField.getFieldObjectInspector();
+            }
+
+            // init output
+            if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
+                // The output of a partial aggregation is a struct containing
+                // a "long" count and a "double" sum.
+
+                ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+                foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                ArrayList<String> fname = new ArrayList<String>();
+                fname.add("count");
+                fname.add("sum");
+                partialResult = new Object[2];
+                partialResult[0] = new LongWritable(0);
+                partialResult[1] = new DoubleWritable(0);
+                return ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+
+            } else {
+                result = new DoubleWritable(0);
+                return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+            }
+        }
+
+        static class AverageAgg implements SerializableBuffer {
+            long count;
+            double sum;
+
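+            // Fixed 16-byte buffer layout: the long count in the first 8
+            // bytes, followed by the double sum in the next 8 bytes.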
+            @Override
+            public void deSerializeAggBuffer(byte[] data, int start, int len) {
+                count = BufferSerDeUtil.getLong(data, start);
+                start += 8;
+                sum = BufferSerDeUtil.getDouble(data, start);
+            }
+
+            @Override
+            public void serializeAggBuffer(byte[] data, int start, int len) {
+                BufferSerDeUtil.writeLong(count, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(sum, data, start);
+            }
+
+            @Override
+            public void serializeAggBuffer(DataOutput output) throws IOException {
+                output.writeLong(count);
+                output.writeDouble(sum);
+            }
+        }
+
+        @Override
+        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+            AverageAgg result = new AverageAgg();
+            reset(result);
+            return result;
+        }
+
+        @Override
+        public void reset(AggregationBuffer agg) throws HiveException {
+            AverageAgg myagg = (AverageAgg) agg;
+            myagg.count = 0;
+            myagg.sum = 0;
+        }
+
+        boolean warned = false;
+
+        @Override
+        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+            assert (parameters.length == 1);
+            Object p = parameters[0];
+            if (p != null) {
+                AverageAgg myagg = (AverageAgg) agg;
+                try {
+                    double v = PrimitiveObjectInspectorUtils.getDouble(p, inputOI);
+                    myagg.count++;
+                    myagg.sum += v;
+                } catch (NumberFormatException e) {
+                    if (!warned) {
+                        warned = true;
+                        LOG.warn(getClass().getSimpleName() + " " + StringUtils.stringifyException(e));
+                        LOG.warn(getClass().getSimpleName() + " ignoring similar exceptions.");
+                    }
+                }
+            }
+        }
+
+        @Override
+        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+            AverageAgg myagg = (AverageAgg) agg;
+            ((LongWritable) partialResult[0]).set(myagg.count);
+            ((DoubleWritable) partialResult[1]).set(myagg.sum);
+            return partialResult;
+        }
+
+        @Override
+        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+            if (partial != null) {
+                AverageAgg myagg = (AverageAgg) agg;
+                Object partialCount = soi.getStructFieldData(partial, countField);
+                Object partialSum = soi.getStructFieldData(partial, sumField);
+                myagg.count += countFieldOI.get(partialCount);
+                myagg.sum += sumFieldOI.get(partialSum);
+            }
+        }
+
+        @Override
+        public Object terminate(AggregationBuffer agg) throws HiveException {
+            AverageAgg myagg = (AverageAgg) agg;
+            if (myagg.count == 0) {
+                return null;
+            } else {
+                result.set(myagg.sum / myagg.count);
+                return result;
+            }
+        }
+    }
+
+}
diff --git a/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java
new file mode 100644
index 0000000..2c4022e
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCorrelation.java
@@ -0,0 +1,392 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.LongWritable;
+
+import edu.uci.ics.hivesterix.runtime.evaluator.BufferSerDeUtil;
+import edu.uci.ics.hivesterix.runtime.evaluator.SerializableBuffer;
+
+/**
+ * Compute the Pearson correlation coefficient corr(x, y), using the following
+ * stable one-pass method, based on: "Formulas for Robust, One-Pass Parallel
+ * Computation of Covariances and Arbitrary-Order Statistical Moments",
+ * Philippe Pebay, Sandia Labs, and "The Art of Computer Programming, volume 2:
+ * Seminumerical Algorithms", Donald Knuth.
+ * Incremental:
+ *   n : <count>
+ *   mx_n = mx_(n-1) + [x_n - mx_(n-1)]/n : <xavg>
+ *   my_n = my_(n-1) + [y_n - my_(n-1)]/n : <yavg>
+ *   c_n = c_(n-1) + (x_n - mx_(n-1))*(y_n - my_n) : <covariance * n>
+ *   vx_n = vx_(n-1) + (x_n - mx_n)*(x_n - mx_(n-1)) : <variance * n>
+ *   vy_n = vy_(n-1) + (y_n - my_n)*(y_n - my_(n-1)) : <variance * n>
+ * Merge:
+ *   c_(A,B) = c_A + c_B + (mx_A - mx_B)*(my_A - my_B)*n_A*n_B/(n_A+n_B)
+ *   vx_(A,B) = vx_A + vx_B + (mx_A - mx_B)*(mx_A - mx_B)*n_A*n_B/(n_A+n_B)
+ *   vy_(A,B) = vy_A + vy_B + (my_A - my_B)*(my_A - my_B)*n_A*n_B/(n_A+n_B)
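+ * Example (illustrative values, worked by hand): merging A with n_A = 2,
+ * mx_A = 1.0 and B with n_B = 2, mx_B = 3.0 gives
+ * mx_(A,B) = (1.0*2 + 3.0*2)/4 = 2.0 and adds
+ * (1.0-3.0)*(1.0-3.0)*(2*2)/(2+2) = 4.0 to vx.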
+ */
+@Description(name = "corr", value = "_FUNC_(x,y) - Returns the Pearson coefficient of correlation\n"
+        + "between a set of number pairs", extended = "The function takes as arguments any pair of numeric types and returns a double.\n"
+        + "Any pair with a NULL is ignored. If the function is applied to an empty set or\n"
+        + "a singleton set, NULL will be returned. Otherwise, it computes the following:\n"
+        + "   COVAR_POP(x,y)/(STDDEV_POP(x)*STDDEV_POP(y))\n"
+        + "where neither x nor y is null,\n"
+        + "COVAR_POP is the population covariance,\n" + "and STDDEV_POP is the population standard deviation.")
+public class GenericUDAFCorrelation extends AbstractGenericUDAFResolver {
+
+    static final Log LOG = LogFactory.getLog(GenericUDAFCorrelation.class.getName());
+
+    @Override
+    public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+        if (parameters.length != 2) {
+            throw new UDFArgumentTypeException(parameters.length - 1, "Exactly two arguments are expected.");
+        }
+
+        if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+            throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
+                    + parameters[0].getTypeName() + " is passed.");
+        }
+
+        if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+            throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but "
+                    + parameters[1].getTypeName() + " is passed.");
+        }
+
+        switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+            case BYTE:
+            case SHORT:
+            case INT:
+            case LONG:
+            case FLOAT:
+            case DOUBLE:
+                switch (((PrimitiveTypeInfo) parameters[1]).getPrimitiveCategory()) {
+                    case BYTE:
+                    case SHORT:
+                    case INT:
+                    case LONG:
+                    case FLOAT:
+                    case DOUBLE:
+                        return new GenericUDAFCorrelationEvaluator();
+                    case STRING:
+                    case BOOLEAN:
+                    default:
+                        throw new UDFArgumentTypeException(1, "Only numeric type arguments are accepted but "
+                                + parameters[1].getTypeName() + " is passed.");
+                }
+            case STRING:
+            case BOOLEAN:
+            default:
+                throw new UDFArgumentTypeException(0, "Only numeric type arguments are accepted but "
+                        + parameters[0].getTypeName() + " is passed.");
+        }
+    }
+
+    /**
+     * Evaluate the Pearson correlation coefficient using a stable one-pass
+     * algorithm, based on work by Philippe Pébay and Donald Knuth.
+     * Incremental:
+     *   n : <count>
+     *   mx_n = mx_(n-1) + [x_n - mx_(n-1)]/n : <xavg>
+     *   my_n = my_(n-1) + [y_n - my_(n-1)]/n : <yavg>
+     *   c_n = c_(n-1) + (x_n - mx_(n-1))*(y_n - my_n) : <covariance * n>
+     *   vx_n = vx_(n-1) + (x_n - mx_n)*(x_n - mx_(n-1)) : <variance * n>
+     *   vy_n = vy_(n-1) + (y_n - my_n)*(y_n - my_(n-1)) : <variance * n>
+     * Merge (n_X = n_A + n_B):
+     *   c_X = c_A + c_B + (mx_A - mx_B)*(my_A - my_B)*n_A*n_B/n_X
+     *   vx_(A,B) = vx_A + vx_B + (mx_A - mx_B)*(mx_A - mx_B)*n_A*n_B/(n_A+n_B)
+     *   vy_(A,B) = vy_A + vy_B + (my_A - my_B)*(my_A - my_B)*n_A*n_B/(n_A+n_B)
+     */
+    public static class GenericUDAFCorrelationEvaluator extends GenericUDAFEvaluator {
+
+        // For PARTIAL1 and COMPLETE
+        private PrimitiveObjectInspector xInputOI;
+        private PrimitiveObjectInspector yInputOI;
+
+        // For PARTIAL2 and FINAL
+        private StructObjectInspector soi;
+        private StructField countField;
+        private StructField xavgField;
+        private StructField yavgField;
+        private StructField xvarField;
+        private StructField yvarField;
+        private StructField covarField;
+        private LongObjectInspector countFieldOI;
+        private DoubleObjectInspector xavgFieldOI;
+        private DoubleObjectInspector yavgFieldOI;
+        private DoubleObjectInspector xvarFieldOI;
+        private DoubleObjectInspector yvarFieldOI;
+        private DoubleObjectInspector covarFieldOI;
+
+        // For PARTIAL1 and PARTIAL2
+        private Object[] partialResult;
+
+        // For FINAL and COMPLETE
+        private DoubleWritable result;
+
+        @Override
+        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+            super.init(m, parameters);
+
+            // init input
+            if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
+                assert (parameters.length == 2);
+                xInputOI = (PrimitiveObjectInspector) parameters[0];
+                yInputOI = (PrimitiveObjectInspector) parameters[1];
+            } else {
+                assert (parameters.length == 1);
+                soi = (StructObjectInspector) parameters[0];
+
+                countField = soi.getStructFieldRef("count");
+                xavgField = soi.getStructFieldRef("xavg");
+                yavgField = soi.getStructFieldRef("yavg");
+                xvarField = soi.getStructFieldRef("xvar");
+                yvarField = soi.getStructFieldRef("yvar");
+                covarField = soi.getStructFieldRef("covar");
+
+                countFieldOI = (LongObjectInspector) countField.getFieldObjectInspector();
+                xavgFieldOI = (DoubleObjectInspector) xavgField.getFieldObjectInspector();
+                yavgFieldOI = (DoubleObjectInspector) yavgField.getFieldObjectInspector();
+                xvarFieldOI = (DoubleObjectInspector) xvarField.getFieldObjectInspector();
+                yvarFieldOI = (DoubleObjectInspector) yvarField.getFieldObjectInspector();
+                covarFieldOI = (DoubleObjectInspector) covarField.getFieldObjectInspector();
+            }
+
+            // init output
+            if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
+                // The output of a partial aggregation is a struct containing
+                // a long count, two double averages, two double variances,
+                // and a double covariance.
+
+                ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+
+                foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+
+                ArrayList<String> fname = new ArrayList<String>();
+                fname.add("count");
+                fname.add("xavg");
+                fname.add("yavg");
+                fname.add("xvar");
+                fname.add("yvar");
+                fname.add("covar");
+
+                partialResult = new Object[6];
+                partialResult[0] = new LongWritable(0);
+                partialResult[1] = new DoubleWritable(0);
+                partialResult[2] = new DoubleWritable(0);
+                partialResult[3] = new DoubleWritable(0);
+                partialResult[4] = new DoubleWritable(0);
+                partialResult[5] = new DoubleWritable(0);
+
+                return ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+
+            } else {
+                setResult(new DoubleWritable(0));
+                return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+            }
+        }
+
+        static class StdAgg implements SerializableBuffer {
+            long count; // number n of elements
+            double xavg; // average of x elements
+            double yavg; // average of y elements
+            double xvar; // n times the variance of x elements
+            double yvar; // n times the variance of y elements
+            double covar; // n times the covariance
+
+            @Override
+            public void deSerializeAggBuffer(byte[] data, int start, int len) {
+                count = BufferSerDeUtil.getLong(data, start);
+                start += 8;
+                xavg = BufferSerDeUtil.getDouble(data, start);
+                start += 8;
+                yavg = BufferSerDeUtil.getDouble(data, start);
+                start += 8;
+                xvar = BufferSerDeUtil.getDouble(data, start);
+                start += 8;
+                yvar = BufferSerDeUtil.getDouble(data, start);
+                start += 8;
+                covar = BufferSerDeUtil.getDouble(data, start);
+            }
+
+            @Override
+            public void serializeAggBuffer(byte[] data, int start, int len) {
+                BufferSerDeUtil.writeLong(count, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(xavg, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(yavg, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(xvar, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(yvar, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(covar, data, start);
+            }
+
+            @Override
+            public void serializeAggBuffer(DataOutput output) throws IOException {
+                output.writeLong(count);
+                output.writeDouble(xavg);
+                output.writeDouble(yavg);
+                output.writeDouble(xvar);
+                output.writeDouble(yvar);
+                output.writeDouble(covar);
+            }
+        }
+
+        @Override
+        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+            StdAgg result = new StdAgg();
+            reset(result);
+            return result;
+        }
+
+        @Override
+        public void reset(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
+            myagg.count = 0;
+            myagg.xavg = 0;
+            myagg.yavg = 0;
+            myagg.xvar = 0;
+            myagg.yvar = 0;
+            myagg.covar = 0;
+        }
+
+        @Override
+        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+            assert (parameters.length == 2);
+            Object px = parameters[0];
+            Object py = parameters[1];
+            if (px != null && py != null) {
+                StdAgg myagg = (StdAgg) agg;
+                double vx = PrimitiveObjectInspectorUtils.getDouble(px, xInputOI);
+                double vy = PrimitiveObjectInspectorUtils.getDouble(py, yInputOI);
+                double xavgOld = myagg.xavg;
+                double yavgOld = myagg.yavg;
+                myagg.count++;
+                myagg.xavg += (vx - xavgOld) / myagg.count;
+                myagg.yavg += (vy - yavgOld) / myagg.count;
+                if (myagg.count > 1) {
+                    myagg.covar += (vx - xavgOld) * (vy - myagg.yavg);
+                    myagg.xvar += (vx - xavgOld) * (vx - myagg.xavg);
+                    myagg.yvar += (vy - yavgOld) * (vy - myagg.yavg);
+                }
+            }
+        }
+
+        @Override
+        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
+            ((LongWritable) partialResult[0]).set(myagg.count);
+            ((DoubleWritable) partialResult[1]).set(myagg.xavg);
+            ((DoubleWritable) partialResult[2]).set(myagg.yavg);
+            ((DoubleWritable) partialResult[3]).set(myagg.xvar);
+            ((DoubleWritable) partialResult[4]).set(myagg.yvar);
+            ((DoubleWritable) partialResult[5]).set(myagg.covar);
+            return partialResult;
+        }
+
+        @Override
+        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+            if (partial != null) {
+                StdAgg myagg = (StdAgg) agg;
+
+                Object partialCount = soi.getStructFieldData(partial, countField);
+                Object partialXAvg = soi.getStructFieldData(partial, xavgField);
+                Object partialYAvg = soi.getStructFieldData(partial, yavgField);
+                Object partialXVar = soi.getStructFieldData(partial, xvarField);
+                Object partialYVar = soi.getStructFieldData(partial, yvarField);
+                Object partialCovar = soi.getStructFieldData(partial, covarField);
+
+                long nA = myagg.count;
+                long nB = countFieldOI.get(partialCount);
+
+                if (nA == 0) {
+                    // Just copy the information since there is nothing so far
+                    myagg.count = countFieldOI.get(partialCount);
+                    myagg.xavg = xavgFieldOI.get(partialXAvg);
+                    myagg.yavg = yavgFieldOI.get(partialYAvg);
+                    myagg.xvar = xvarFieldOI.get(partialXVar);
+                    myagg.yvar = yvarFieldOI.get(partialYVar);
+                    myagg.covar = covarFieldOI.get(partialCovar);
+                }
+
+                if (nA != 0 && nB != 0) {
+                    // Merge the two partials
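+                    // using the pairwise formulas from the class comment:
+                    // cross terms are weighted by n_A*n_B/(n_A+n_B).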
+                    double xavgA = myagg.xavg;
+                    double yavgA = myagg.yavg;
+                    double xavgB = xavgFieldOI.get(partialXAvg);
+                    double yavgB = yavgFieldOI.get(partialYAvg);
+                    double xvarB = xvarFieldOI.get(partialXVar);
+                    double yvarB = yvarFieldOI.get(partialYVar);
+                    double covarB = covarFieldOI.get(partialCovar);
+
+                    myagg.count += nB;
+                    myagg.xavg = (xavgA * nA + xavgB * nB) / myagg.count;
+                    myagg.yavg = (yavgA * nA + yavgB * nB) / myagg.count;
+                    myagg.xvar += xvarB + (xavgA - xavgB) * (xavgA - xavgB) * ((double) (nA * nB) / myagg.count);
+                    myagg.yvar += yvarB + (yavgA - yavgB) * (yavgA - yavgB) * ((double) (nA * nB) / myagg.count);
+                    myagg.covar += covarB + (xavgA - xavgB) * (yavgA - yavgB) * ((double) (nA * nB) / myagg.count);
+                }
+            }
+        }
+
+        @Override
+        public Object terminate(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
+
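+            // covar, xvar and yvar each carry a factor of count, which
+            // cancels here: (n*cov)/sqrt(n*vx)/sqrt(n*vy) = cov/(sx*sy).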
+            if (myagg.count < 2) { // SQL standard - return null for zero or one
+                                   // pair
+                return null;
+            } else {
+                getResult().set(myagg.covar / java.lang.Math.sqrt(myagg.xvar) / java.lang.Math.sqrt(myagg.yvar));
+                return getResult();
+            }
+        }
+
+        public void setResult(DoubleWritable result) {
+            this.result = result;
+        }
+
+        public DoubleWritable getResult() {
+            return result;
+        }
+    }
+
+}
diff --git a/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
new file mode 100644
index 0000000..dc5eef0
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
@@ -0,0 +1,170 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.LongWritable;
+
+import edu.uci.ics.hivesterix.runtime.evaluator.BufferSerDeUtil;
+import edu.uci.ics.hivesterix.runtime.evaluator.SerializableBuffer;
+
+/**
+ * This class implements the COUNT aggregation function as in SQL.
+ */
+@Description(name = "count", value = "_FUNC_(*) - Returns the total number of retrieved rows, including "
+        + "rows containing NULL values.\n"
+
+        + "_FUNC_(expr) - Returns the number of rows for which the supplied " + "expression is non-NULL.\n"
+
+        + "_FUNC_(DISTINCT expr[, expr...]) - Returns the number of rows for "
+        + "which the supplied expression(s) are unique and non-NULL.")
+public class GenericUDAFCount implements GenericUDAFResolver2 {
+
+    @Override
+    public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+        // This method implementation is preserved for backward compatibility.
+        return new GenericUDAFCountEvaluator();
+    }
+
+    @Override
+    public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo paramInfo) throws SemanticException {
+
+        TypeInfo[] parameters = paramInfo.getParameters();
+
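+        // Three accepted shapes: count(*) (no parameters, isAllColumns),
+        // count(expr), and count(DISTINCT expr[, expr...]); multiple
+        // expressions are only legal together with DISTINCT.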
+        if (parameters.length == 0) {
+            if (!paramInfo.isAllColumns()) {
+                throw new UDFArgumentException("Argument expected");
+            }
+            assert !paramInfo.isDistinct() : "DISTINCT not supported with *";
+        } else {
+            if (parameters.length > 1 && !paramInfo.isDistinct()) {
+                throw new UDFArgumentException("DISTINCT keyword must be specified");
+            }
+            assert !paramInfo.isAllColumns() : "* not supported in expression list";
+        }
+
+        return new GenericUDAFCountEvaluator().setCountAllColumns(paramInfo.isAllColumns());
+    }
+
+    /**
+     * GenericUDAFCountEvaluator.
+     */
+    public static class GenericUDAFCountEvaluator extends GenericUDAFEvaluator {
+        private boolean countAllColumns = false;
+        private LongObjectInspector partialCountAggOI;
+        private LongWritable result;
+
+        @Override
+        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+            super.init(m, parameters);
+            partialCountAggOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+            result = new LongWritable(0);
+            return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+        }
+
+        private GenericUDAFCountEvaluator setCountAllColumns(boolean countAllCols) {
+            countAllColumns = countAllCols;
+            return this;
+        }
+
+        /** class for storing count value. */
+        static class CountAgg implements SerializableBuffer {
+            long value;
+
+            @Override
+            public void deSerializeAggBuffer(byte[] data, int start, int len) {
+                value = BufferSerDeUtil.getLong(data, start);
+            }
+
+            @Override
+            public void serializeAggBuffer(byte[] data, int start, int len) {
+                BufferSerDeUtil.writeLong(value, data, start);
+            }
+
+            @Override
+            public void serializeAggBuffer(DataOutput output) throws IOException {
+                output.writeLong(value);
+            }
+        }
+
+        @Override
+        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+            CountAgg buffer = new CountAgg();
+            reset(buffer);
+            return buffer;
+        }
+
+        @Override
+        public void reset(AggregationBuffer agg) throws HiveException {
+            ((CountAgg) agg).value = 0;
+        }
+
+        @Override
+        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+            // parameters == null means the input table/split is empty
+            if (parameters == null) {
+                return;
+            }
+            if (countAllColumns) {
+                assert parameters.length == 0;
+                ((CountAgg) agg).value++;
+            } else {
+                assert parameters.length > 0;
+                boolean countThisRow = true;
+                for (Object nextParam : parameters) {
+                    if (nextParam == null) {
+                        countThisRow = false;
+                        break;
+                    }
+                }
+                if (countThisRow) {
+                    ((CountAgg) agg).value++;
+                }
+            }
+        }
+
+        @Override
+        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+            if (partial != null) {
+                long p = partialCountAggOI.get(partial);
+                ((CountAgg) agg).value += p;
+            }
+        }
+
+        @Override
+        public Object terminate(AggregationBuffer agg) throws HiveException {
+            result.set(((CountAgg) agg).value);
+            return result;
+        }
+
+        @Override
+        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+            return terminate(agg);
+        }
+    }
+}
diff --git a/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java
new file mode 100644
index 0000000..0c4448b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCovariance.java
@@ -0,0 +1,341 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.LongWritable;
+
+import edu.uci.ics.hivesterix.runtime.evaluator.BufferSerDeUtil;
+import edu.uci.ics.hivesterix.runtime.evaluator.SerializableBuffer;
+
+/**
+ * Compute the covariance covar_pop(x, y), using the following one-pass method
+ * (ref. "Formulas for Robust, One-Pass Parallel Computation of Covariances and
+ * Arbitrary-Order Statistical Moments", Philippe Pebay, Sandia Labs):
+ * Incremental:
+ *   n : <count>
+ *   mx_n = mx_(n-1) + [x_n - mx_(n-1)]/n : <xavg>
+ *   my_n = my_(n-1) + [y_n - my_(n-1)]/n : <yavg>
+ *   c_n = c_(n-1) + (x_n - mx_(n-1))*(y_n - my_n) : <covariance * n>
+ * Merge (n_X = n_A + n_B):
+ *   c_X = c_A + c_B + (mx_A - mx_B)*(my_A - my_B)*n_A*n_B/n_X
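+ * Example (illustrative values, worked by hand): after x = {1, 2} and
+ * y = {10, 20}: count = 2, xavg = 1.5, yavg = 15 and
+ * c_2 = (2 - 1)*(20 - 15) = 5, so covar_pop = c_2/count = 2.5.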
+ */
+@Description(name = "covariance,covar_pop", value = "_FUNC_(x,y) - Returns the population covariance of a set of number pairs", extended = "The function takes as arguments any pair of numeric types and returns a double.\n"
+        + "Any pair with a NULL is ignored. If the function is applied to an empty set, NULL\n"
+        + "will be returned. Otherwise, it computes the following:\n"
+        + "   (SUM(x*y)-SUM(x)*SUM(y)/COUNT(x,y))/COUNT(x,y)\n" + "where neither x nor y is null.")
+public class GenericUDAFCovariance extends AbstractGenericUDAFResolver {
+
+    static final Log LOG = LogFactory.getLog(GenericUDAFCovariance.class.getName());
+
+    @Override
+    public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+        if (parameters.length != 2) {
+            throw new UDFArgumentTypeException(parameters.length - 1, "Exactly two arguments are expected.");
+        }
+
+        if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+            throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
+                    + parameters[0].getTypeName() + " is passed.");
+        }
+
+        if (parameters[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+            throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but "
+                    + parameters[1].getTypeName() + " is passed.");
+        }
+
+        switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+            case BYTE:
+            case SHORT:
+            case INT:
+            case LONG:
+            case FLOAT:
+            case DOUBLE:
+                switch (((PrimitiveTypeInfo) parameters[1]).getPrimitiveCategory()) {
+                    case BYTE:
+                    case SHORT:
+                    case INT:
+                    case LONG:
+                    case FLOAT:
+                    case DOUBLE:
+                        return new GenericUDAFCovarianceEvaluator();
+                    case STRING:
+                    case BOOLEAN:
+                    default:
+                        throw new UDFArgumentTypeException(1, "Only numeric type arguments are accepted but "
+                                + parameters[1].getTypeName() + " is passed.");
+                }
+            case STRING:
+            case BOOLEAN:
+            default:
+                throw new UDFArgumentTypeException(0, "Only numeric type arguments are accepted but "
+                        + parameters[0].getTypeName() + " is passed.");
+        }
+    }
+
+    /**
+     * Evaluate the covariance using the algorithm described in
+     * http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance,
+     * presumably by Pébay, Philippe (2008), in "Formulas for Robust, One-Pass
+     * Parallel Computation of Covariances and Arbitrary-Order Statistical
+     * Moments", Technical Report SAND2008-6212, Sandia National Laboratories,
+     * http://infoserve.sandia.gov/sand_doc/2008/086212.pdf
+     * Incremental:
+     *   n : <count>
+     *   mx_n = mx_(n-1) + [x_n - mx_(n-1)]/n : <xavg>
+     *   my_n = my_(n-1) + [y_n - my_(n-1)]/n : <yavg>
+     *   c_n = c_(n-1) + (x_n - mx_(n-1))*(y_n - my_n) : <covariance * n>
+     * Merge (n_X = n_A + n_B):
+     *   c_X = c_A + c_B + (mx_A - mx_B)*(my_A - my_B)*n_A*n_B/n_X
+     * This one-pass algorithm is stable.
+     */
+    public static class GenericUDAFCovarianceEvaluator extends GenericUDAFEvaluator {
+
+        // For PARTIAL1 and COMPLETE
+        private PrimitiveObjectInspector xInputOI;
+        private PrimitiveObjectInspector yInputOI;
+
+        // For PARTIAL2 and FINAL
+        private StructObjectInspector soi;
+        private StructField countField;
+        private StructField xavgField;
+        private StructField yavgField;
+        private StructField covarField;
+        private LongObjectInspector countFieldOI;
+        private DoubleObjectInspector xavgFieldOI;
+        private DoubleObjectInspector yavgFieldOI;
+        private DoubleObjectInspector covarFieldOI;
+
+        // For PARTIAL1 and PARTIAL2
+        private Object[] partialResult;
+
+        // For FINAL and COMPLETE
+        private DoubleWritable result;
+
+        @Override
+        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+            super.init(m, parameters);
+
+            // init input
+            if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
+                assert (parameters.length == 2);
+                xInputOI = (PrimitiveObjectInspector) parameters[0];
+                yInputOI = (PrimitiveObjectInspector) parameters[1];
+            } else {
+                assert (parameters.length == 1);
+                soi = (StructObjectInspector) parameters[0];
+
+                countField = soi.getStructFieldRef("count");
+                xavgField = soi.getStructFieldRef("xavg");
+                yavgField = soi.getStructFieldRef("yavg");
+                covarField = soi.getStructFieldRef("covar");
+
+                countFieldOI = (LongObjectInspector) countField.getFieldObjectInspector();
+                xavgFieldOI = (DoubleObjectInspector) xavgField.getFieldObjectInspector();
+                yavgFieldOI = (DoubleObjectInspector) yavgField.getFieldObjectInspector();
+                covarFieldOI = (DoubleObjectInspector) covarField.getFieldObjectInspector();
+            }
+
+            // init output
+            if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
+                // The output of a partial aggregation is a struct containing
+                // a long count, two double averages, and a double covariance.
+
+                ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+
+                foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+
+                ArrayList<String> fname = new ArrayList<String>();
+                fname.add("count");
+                fname.add("xavg");
+                fname.add("yavg");
+                fname.add("covar");
+
+                partialResult = new Object[4];
+                partialResult[0] = new LongWritable(0);
+                partialResult[1] = new DoubleWritable(0);
+                partialResult[2] = new DoubleWritable(0);
+                partialResult[3] = new DoubleWritable(0);
+
+                return ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+
+            } else {
+                setResult(new DoubleWritable(0));
+                return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+            }
+        }
+
+        static class StdAgg implements SerializableBuffer {
+            long count; // number n of elements
+            double xavg; // average of x elements
+            double yavg; // average of y elements
+            double covar; // n times the covariance
+
+            @Override
+            public void deSerializeAggBuffer(byte[] data, int start, int len) {
+                count = BufferSerDeUtil.getLong(data, start);
+                start += 8;
+                xavg = BufferSerDeUtil.getDouble(data, start);
+                start += 8;
+                yavg = BufferSerDeUtil.getDouble(data, start);
+                start += 8;
+                covar = BufferSerDeUtil.getDouble(data, start);
+            }
+
+            @Override
+            public void serializeAggBuffer(byte[] data, int start, int len) {
+                BufferSerDeUtil.writeLong(count, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(xavg, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(yavg, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(covar, data, start);
+            }
+
+            @Override
+            public void serializeAggBuffer(DataOutput output) throws IOException {
+                output.writeLong(count);
+                output.writeDouble(xavg);
+                output.writeDouble(yavg);
+                output.writeDouble(covar);
+            }
+        }
+
+        @Override
+        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+            StdAgg result = new StdAgg();
+            reset(result);
+            return result;
+        }
+
+        @Override
+        public void reset(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
+            myagg.count = 0;
+            myagg.xavg = 0;
+            myagg.yavg = 0;
+            myagg.covar = 0;
+        }
+
+        @Override
+        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+            assert (parameters.length == 2);
+            Object px = parameters[0];
+            Object py = parameters[1];
+            if (px != null && py != null) {
+                StdAgg myagg = (StdAgg) agg;
+                double vx = PrimitiveObjectInspectorUtils.getDouble(px, xInputOI);
+                double vy = PrimitiveObjectInspectorUtils.getDouble(py, yInputOI);
+                myagg.count++;
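+                // Update order matters: yavg is advanced before covar so the
+                // increment uses the old xavg and the new yavg, matching
+                // c_n = c_(n-1) + (x_n - mx_(n-1))*(y_n - my_n); xavg is
+                // advanced last.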
+                myagg.yavg = myagg.yavg + (vy - myagg.yavg) / myagg.count;
+                if (myagg.count > 1) {
+                    myagg.covar += (vx - myagg.xavg) * (vy - myagg.yavg);
+                }
+                myagg.xavg = myagg.xavg + (vx - myagg.xavg) / myagg.count;
+            }
+        }
+
+        @Override
+        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
+            ((LongWritable) partialResult[0]).set(myagg.count);
+            ((DoubleWritable) partialResult[1]).set(myagg.xavg);
+            ((DoubleWritable) partialResult[2]).set(myagg.yavg);
+            ((DoubleWritable) partialResult[3]).set(myagg.covar);
+            return partialResult;
+        }
+
+        @Override
+        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+            if (partial != null) {
+                StdAgg myagg = (StdAgg) agg;
+
+                Object partialCount = soi.getStructFieldData(partial, countField);
+                Object partialXAvg = soi.getStructFieldData(partial, xavgField);
+                Object partialYAvg = soi.getStructFieldData(partial, yavgField);
+                Object partialCovar = soi.getStructFieldData(partial, covarField);
+
+                long nA = myagg.count;
+                long nB = countFieldOI.get(partialCount);
+
+                if (nA == 0) {
+                    // Just copy the information since there is nothing so far
+                    myagg.count = countFieldOI.get(partialCount);
+                    myagg.xavg = xavgFieldOI.get(partialXAvg);
+                    myagg.yavg = yavgFieldOI.get(partialYAvg);
+                    myagg.covar = covarFieldOI.get(partialCovar);
+                }
+
+                if (nA != 0 && nB != 0) {
+                    // Merge the two partials
+                    double xavgA = myagg.xavg;
+                    double yavgA = myagg.yavg;
+                    double xavgB = xavgFieldOI.get(partialXAvg);
+                    double yavgB = yavgFieldOI.get(partialYAvg);
+                    double covarB = covarFieldOI.get(partialCovar);
+
+                    myagg.count += nB;
+                    myagg.xavg = (xavgA * nA + xavgB * nB) / myagg.count;
+                    myagg.yavg = (yavgA * nA + yavgB * nB) / myagg.count;
+                    myagg.covar += covarB + (xavgA - xavgB) * (yavgA - yavgB) * ((double) (nA * nB) / myagg.count);
+                }
+            }
+        }
+
+        @Override
+        public Object terminate(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
+
+            if (myagg.count == 0) { // SQL standard - return null for zero
+                                    // elements
+                return null;
+            } else {
+                getResult().set(myagg.covar / (myagg.count));
+                return getResult();
+            }
+        }
+
+        public void setResult(DoubleWritable result) {
+            this.result = result;
+        }
+
+        public DoubleWritable getResult() {
+            return result;
+        }
+    }
+
+}
diff --git a/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
new file mode 100644
index 0000000..afdc397
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
@@ -0,0 +1,272 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.util.StringUtils;
+
+import edu.uci.ics.hivesterix.runtime.evaluator.BufferSerDeUtil;
+import edu.uci.ics.hivesterix.runtime.evaluator.SerializableBuffer;
+
+/**
+ * GenericUDAFSum.
+ */
+@Description(name = "sum", value = "_FUNC_(x) - Returns the sum of a set of numbers")
+public class GenericUDAFSum extends AbstractGenericUDAFResolver {
+
+    static final Log LOG = LogFactory.getLog(GenericUDAFSum.class.getName());
+
+    @Override
+    public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+        if (parameters.length != 1) {
+            throw new UDFArgumentTypeException(parameters.length - 1, "Exactly one argument is expected.");
+        }
+
+        if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+            throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
+                    + parameters[0].getTypeName() + " is passed.");
+        }
+        switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+            case BYTE:
+            case SHORT:
+            case INT:
+            case LONG:
+                return new GenericUDAFSumLong();
+            case FLOAT:
+            case DOUBLE:
+            case STRING:
+                return new GenericUDAFSumDouble();
+            case BOOLEAN:
+            default:
+                throw new UDFArgumentTypeException(0, "Only numeric or string type arguments are accepted but "
+                        + parameters[0].getTypeName() + " is passed.");
+        }
+    }
+
+    /**
+     * GenericUDAFSumDouble.
+     */
+    public static class GenericUDAFSumDouble extends GenericUDAFEvaluator {
+        private PrimitiveObjectInspector inputOI;
+        private DoubleWritable result;
+
+        @Override
+        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+            assert (parameters.length == 1);
+            super.init(m, parameters);
+            result = new DoubleWritable(0);
+            inputOI = (PrimitiveObjectInspector) parameters[0];
+            return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+        }
+
+        /** class for storing double sum value. */
+        static class SumDoubleAgg implements SerializableBuffer {
+            boolean empty;
+            double sum;
+
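+            // 9-byte layout: one byte for the empty flag followed by the
+            // 8-byte double sum, matching the order written below.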
+            @Override
+            public void deSerializeAggBuffer(byte[] data, int start, int len) {
+                empty = BufferSerDeUtil.getBoolean(data, start);
+                start += 1;
+                sum = BufferSerDeUtil.getDouble(data, start);
+            }
+
+            @Override
+            public void serializeAggBuffer(byte[] data, int start, int len) {
+                BufferSerDeUtil.writeBoolean(empty, data, start);
+                start += 1;
+                BufferSerDeUtil.writeDouble(sum, data, start);
+            }
+
+            @Override
+            public void serializeAggBuffer(DataOutput output) throws IOException {
+                output.writeBoolean(empty);
+                output.writeDouble(sum);
+            }
+        }
+
+        @Override
+        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+            SumDoubleAgg result = new SumDoubleAgg();
+            reset(result);
+            return result;
+        }
+
+        @Override
+        public void reset(AggregationBuffer agg) throws HiveException {
+            SumDoubleAgg myagg = (SumDoubleAgg) agg;
+            myagg.empty = true;
+            myagg.sum = 0;
+        }
+
+        boolean warned = false;
+
+        @Override
+        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+            assert (parameters.length == 1);
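+            // An original input value and a partial sum are both single
+            // doubles here, so iterate can simply delegate to merge.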
+            try {
+                merge(agg, parameters[0]);
+            } catch (NumberFormatException e) {
+                if (!warned) {
+                    warned = true;
+                    LOG.warn(getClass().getSimpleName() + " " + StringUtils.stringifyException(e));
+                    LOG.warn(getClass().getSimpleName() + " ignoring similar exceptions.");
+                }
+            }
+        }
+
+        @Override
+        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+            return terminate(agg);
+        }
+
+        @Override
+        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+            if (partial != null) {
+                SumDoubleAgg myagg = (SumDoubleAgg) agg;
+                myagg.empty = false;
+                myagg.sum += PrimitiveObjectInspectorUtils.getDouble(partial, inputOI);
+            }
+        }
+
+        @Override
+        public Object terminate(AggregationBuffer agg) throws HiveException {
+            SumDoubleAgg myagg = (SumDoubleAgg) agg;
+            if (myagg.empty) {
+                return null;
+            }
+            result.set(myagg.sum);
+            return result;
+        }
+
+    }
+
+    /**
+     * GenericUDAFSumLong.
+     */
+    public static class GenericUDAFSumLong extends GenericUDAFEvaluator {
+        private PrimitiveObjectInspector inputOI;
+        private LongWritable result;
+
+        @Override
+        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+            assert (parameters.length == 1);
+            super.init(m, parameters);
+            result = new LongWritable(0);
+            inputOI = (PrimitiveObjectInspector) parameters[0];
+            return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
+        }
+
+        /** class for storing double sum value. */
+        static class SumLongAgg implements SerializableBuffer {
+            boolean empty;
+            long sum;
+
+            @Override
+            public void deSerializeAggBuffer(byte[] data, int start, int len) {
+                empty = BufferSerDeUtil.getBoolean(data, start);
+                start += 1;
+                sum = BufferSerDeUtil.getLong(data, start);
+            }
+
+            @Override
+            public void serializeAggBuffer(byte[] data, int start, int len) {
+                BufferSerDeUtil.writeBoolean(empty, data, start);
+                start += 1;
+                BufferSerDeUtil.writeLong(sum, data, start);
+            }
+
+            @Override
+            public void serializeAggBuffer(DataOutput output) throws IOException {
+                output.writeBoolean(empty);
+                output.writeLong(sum);
+            }
+        }
+
+        @Override
+        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+            SumLongAgg result = new SumLongAgg();
+            reset(result);
+            return result;
+        }
+
+        @Override
+        public void reset(AggregationBuffer agg) throws HiveException {
+            SumLongAgg myagg = (SumLongAgg) agg;
+            myagg.empty = true;
+            myagg.sum = 0;
+        }
+
+        private boolean warned = false;
+
+        @Override
+        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+            assert (parameters.length == 1);
+            try {
+                merge(agg, parameters[0]);
+            } catch (NumberFormatException e) {
+                if (!warned) {
+                    warned = true;
+                    LOG.warn(getClass().getSimpleName() + " " + StringUtils.stringifyException(e));
+                }
+            }
+        }
+
+        @Override
+        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+            return terminate(agg);
+        }
+
+        @Override
+        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+            if (partial != null) {
+                SumLongAgg myagg = (SumLongAgg) agg;
+                myagg.sum += PrimitiveObjectInspectorUtils.getLong(partial, inputOI);
+                myagg.empty = false;
+            }
+        }
+
+        @Override
+        public Object terminate(AggregationBuffer agg) throws HiveException {
+            SumLongAgg myagg = (SumLongAgg) agg;
+            if (myagg.empty) {
+                return null;
+            }
+            result.set(myagg.sum);
+            return result;
+        }
+
+    }
+
+}
diff --git a/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
new file mode 100644
index 0000000..e839008
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
@@ -0,0 +1,305 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.util.StringUtils;
+
+import edu.uci.ics.hivesterix.runtime.evaluator.BufferSerDeUtil;
+import edu.uci.ics.hivesterix.runtime.evaluator.SerializableBuffer;
+
+/**
+ * Compute the variance. This class is extended by: GenericUDAFVarianceSample
+ * GenericUDAFStd GenericUDAFStdSample
+ */
+@Description(name = "variance,var_pop", value = "_FUNC_(x) - Returns the variance of a set of numbers")
+public class GenericUDAFVariance extends AbstractGenericUDAFResolver {
+
+    static final Log LOG = LogFactory.getLog(GenericUDAFVariance.class.getName());
+
+    @Override
+    public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+        if (parameters.length != 1) {
+            throw new UDFArgumentTypeException(parameters.length - 1, "Exactly one argument is expected.");
+        }
+
+        if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+            throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
+                    + parameters[0].getTypeName() + " is passed.");
+        }
+        switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+            case BYTE:
+            case SHORT:
+            case INT:
+            case LONG:
+            case FLOAT:
+            case DOUBLE:
+            case STRING:
+                return new GenericUDAFVarianceEvaluator();
+            case BOOLEAN:
+            default:
+                throw new UDFArgumentTypeException(0, "Only numeric or string type arguments are accepted but "
+                        + parameters[0].getTypeName() + " is passed.");
+        }
+    }
+
+    /**
+     * Evaluate the variance using the algorithm described by Chan, Golub, and
+     * LeVeque in "Algorithms for computing the sample variance: analysis and
+     * recommendations", The American Statistician, 37 (1983), pp. 242--247:
+     *
+     * variance = variance1 + variance2 + n/(m*(m+n)) * pow(((m/n)*t1 - t2), 2)
+     *
+     * where:
+     * - variance is sum[(x-avg)^2] (this is actually n times the variance)
+     *   and is updated at every step,
+     * - n is the count of elements in chunk1, m is the count of elements in chunk2,
+     * - t1 is the sum of elements in chunk1, t2 is the sum of elements in chunk2.
+     *
+     * This algorithm was proven to be numerically stable by J.L. Barlow in
+     * "Error analysis of a pairwise summation algorithm to compute sample variance",
+     * Numer. Math., 58 (1991), pp. 583--590.
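+     *
+     * Worked example (editorial illustration, not in the original source):
+     * merging chunk1 = {1, 2} (n = 2, t1 = 3, variance1 = 0.5) with
+     * chunk2 = {4, 5} (m = 2, t2 = 9, variance2 = 0.5) gives
+     * variance = 0.5 + 0.5 + 2/(2*4) * ((2/2)*3 - 9)^2 = 10,
+     * and 10/4 = 2.5 is indeed the population variance of {1, 2, 4, 5}.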
+     */
+    public static class GenericUDAFVarianceEvaluator extends GenericUDAFEvaluator {
+
+        // For PARTIAL1 and COMPLETE
+        private PrimitiveObjectInspector inputOI;
+
+        // For PARTIAL2 and FINAL
+        private StructObjectInspector soi;
+        private StructField countField;
+        private StructField sumField;
+        private StructField varianceField;
+        private LongObjectInspector countFieldOI;
+        private DoubleObjectInspector sumFieldOI;
+
+        // For PARTIAL1 and PARTIAL2
+        private Object[] partialResult;
+
+        // For FINAL and COMPLETE
+        private DoubleWritable result;
+
+        @Override
+        public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+            assert (parameters.length == 1);
+            super.init(m, parameters);
+
+            // init input
+            if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
+                inputOI = (PrimitiveObjectInspector) parameters[0];
+            } else {
+                soi = (StructObjectInspector) parameters[0];
+
+                countField = soi.getStructFieldRef("count");
+                sumField = soi.getStructFieldRef("sum");
+                varianceField = soi.getStructFieldRef("variance");
+
+                countFieldOI = (LongObjectInspector) countField.getFieldObjectInspector();
+                sumFieldOI = (DoubleObjectInspector) sumField.getFieldObjectInspector();
+            }
+
+            // init output
+            if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
+                // The output of a partial aggregation is a struct containing
+                // a long count and doubles sum and variance.
+
+                ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+
+                foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+                foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+
+                ArrayList<String> fname = new ArrayList<String>();
+                fname.add("count");
+                fname.add("sum");
+                fname.add("variance");
+
+                partialResult = new Object[3];
+                partialResult[0] = new LongWritable(0);
+                partialResult[1] = new DoubleWritable(0);
+                partialResult[2] = new DoubleWritable(0);
+
+                return ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+
+            } else {
+                setResult(new DoubleWritable(0));
+                return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
+            }
+        }
+
+        static class StdAgg implements SerializableBuffer {
+            long count; // number of elements
+            double sum; // sum of elements
+            double variance; // sum[(x-avg)^2] (this is actually n times the
+                             // variance)
+
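+            // The serialized buffer uses a fixed 24-byte layout:
+            // [count: 8-byte long][sum: 8-byte double][variance: 8-byte double]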
+            @Override
+            public void deSerializeAggBuffer(byte[] data, int start, int len) {
+                count = BufferSerDeUtil.getLong(data, start);
+                start += 8;
+                sum = BufferSerDeUtil.getDouble(data, start);
+                start += 8;
+                variance = BufferSerDeUtil.getDouble(data, start);
+            }
+
+            @Override
+            public void serializeAggBuffer(byte[] data, int start, int len) {
+                BufferSerDeUtil.writeLong(count, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(sum, data, start);
+                start += 8;
+                BufferSerDeUtil.writeDouble(variance, data, start);
+            }
+
+            @Override
+            public void serializeAggBuffer(DataOutput output) throws IOException {
+                output.writeLong(count);
+                output.writeDouble(sum);
+                output.writeDouble(variance);
+            }
+        }
+
+        @Override
+        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+            StdAgg result = new StdAgg();
+            reset(result);
+            return result;
+        }
+
+        @Override
+        public void reset(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
+            myagg.count = 0;
+            myagg.sum = 0;
+            myagg.variance = 0;
+        }
+
+        private boolean warned = false;
+
+        @Override
+        public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+            assert (parameters.length == 1);
+            Object p = parameters[0];
+            if (p != null) {
+                StdAgg myagg = (StdAgg) agg;
+                try {
+                    double v = PrimitiveObjectInspectorUtils.getDouble(p, inputOI);
+                    myagg.count++;
+                    myagg.sum += v;
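+                    // Incremental update: with t = count*v - sum (sum already
+                    // includes v), t*t/(count*(count-1)) equals the pairwise
+                    // merge term for folding the single element v into the
+                    // previous count-1 elements.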
+                    if (myagg.count > 1) {
+                        double t = myagg.count * v - myagg.sum;
+                        myagg.variance += (t * t) / ((double) myagg.count * (myagg.count - 1));
+                    }
+                } catch (NumberFormatException e) {
+                    if (!warned) {
+                        warned = true;
+                        LOG.warn(getClass().getSimpleName() + " " + StringUtils.stringifyException(e));
+                        LOG.warn(getClass().getSimpleName() + " ignoring similar exceptions.");
+                    }
+                }
+            }
+        }
+
+        @Override
+        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
+            ((LongWritable) partialResult[0]).set(myagg.count);
+            ((DoubleWritable) partialResult[1]).set(myagg.sum);
+            ((DoubleWritable) partialResult[2]).set(myagg.variance);
+            return partialResult;
+        }
+
+        @Override
+        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+            if (partial != null) {
+                StdAgg myagg = (StdAgg) agg;
+
+                Object partialCount = soi.getStructFieldData(partial, countField);
+                Object partialSum = soi.getStructFieldData(partial, sumField);
+                Object partialVariance = soi.getStructFieldData(partial, varianceField);
+
+                long n = myagg.count;
+                long m = countFieldOI.get(partialCount);
+
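+                // n holds the pre-merge count even after the copy in the
+                // first branch, so at most one of the two branches below
+                // executes per merge call.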
+                if (n == 0) {
+                    // Just copy the information since there is nothing so far
+                    myagg.variance = sumFieldOI.get(partialVariance);
+                    myagg.count = countFieldOI.get(partialCount);
+                    myagg.sum = sumFieldOI.get(partialSum);
+                }
+
+                if (m != 0 && n != 0) {
+                    // Merge the two partials
+
+                    double a = myagg.sum;
+                    double b = sumFieldOI.get(partialSum);
+
+                    myagg.count += m;
+                    myagg.sum += b;
+                    double t = (m / (double) n) * a - b;
+                    myagg.variance += sumFieldOI.get(partialVariance) + ((n / (double) m) / ((double) n + m)) * t * t;
+                }
+            }
+        }
+
+        @Override
+        public Object terminate(AggregationBuffer agg) throws HiveException {
+            StdAgg myagg = (StdAgg) agg;
+
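+            // myagg.variance accumulates n times the variance (see the
+            // evaluator javadoc), so dividing by count below yields the
+            // population variance (var_pop).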
+            if (myagg.count == 0) { // SQL standard - return null for zero
+                                    // elements
+                return null;
+            } else {
+                if (myagg.count > 1) {
+                    getResult().set(myagg.variance / (myagg.count));
+                } else { // for one element the variance is always 0
+                    getResult().set(0);
+                }
+                return getResult();
+            }
+        }
+
+        public void setResult(DoubleWritable result) {
+            this.result = result;
+        }
+
+        public DoubleWritable getResult() {
+            return result;
+        }
+    }
+
+}
diff --git a/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyObjectInspectorFactory.java b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyObjectInspectorFactory.java
new file mode 100644
index 0000000..7920001
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyObjectInspectorFactory.java
@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2.lazy.objectinspector;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.io.Text;
+
+/**
+ * LazyObjectInspectorFactory is the primary way to create new lazy
+ * ObjectInspector instances.
+ * SerDe classes should call the static functions in this library to create an
+ * ObjectInspector to return to the caller of SerDe2.getObjectInspector().
+ * The reason for having caches here is that ObjectInspectors have no internal
+ * state, so ObjectInspectors with the same construction parameters should
+ * resolve to exactly the same instance.
+ */
+public final class LazyObjectInspectorFactory {
+
+    static ConcurrentHashMap<ArrayList<Object>, LazySimpleStructObjectInspector> cachedLazySimpleStructObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazySimpleStructObjectInspector>();
+
+    public static LazySimpleStructObjectInspector getLazySimpleStructObjectInspector(List<String> structFieldNames,
+            List<ObjectInspector> structFieldObjectInspectors, byte separator, Text nullSequence,
+            boolean lastColumnTakesRest, boolean escaped, byte escapeChar) {
+        ArrayList<Object> signature = new ArrayList<Object>();
+        signature.add(structFieldNames);
+        signature.add(structFieldObjectInspectors);
+        signature.add(Byte.valueOf(separator));
+        signature.add(nullSequence.toString());
+        signature.add(Boolean.valueOf(lastColumnTakesRest));
+        signature.add(Boolean.valueOf(escaped));
+        signature.add(Byte.valueOf(escapeChar));
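+        // Editorial note: the cache get-then-put below is not atomic, so two
+        // racing threads may briefly create duplicate inspectors. This is
+        // benign because inspectors are stateless; putIfAbsent would keep a
+        // single canonical instance.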
+        LazySimpleStructObjectInspector result = cachedLazySimpleStructObjectInspector.get(signature);
+        if (result == null) {
+            result = new LazySimpleStructObjectInspector(structFieldNames, structFieldObjectInspectors, separator,
+                    nullSequence, lastColumnTakesRest, escaped, escapeChar);
+            cachedLazySimpleStructObjectInspector.put(signature, result);
+        }
+        return result;
+    }
+
+    static ConcurrentHashMap<ArrayList<Object>, LazyListObjectInspector> cachedLazySimpleListObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyListObjectInspector>();
+
+    public static LazyListObjectInspector getLazySimpleListObjectInspector(ObjectInspector listElementObjectInspector,
+            byte separator, Text nullSequence, boolean escaped, byte escapeChar) {
+        ArrayList<Object> signature = new ArrayList<Object>();
+        signature.add(listElementObjectInspector);
+        signature.add(Byte.valueOf(separator));
+        signature.add(nullSequence.toString());
+        signature.add(Boolean.valueOf(escaped));
+        signature.add(Byte.valueOf(escapeChar));
+        LazyListObjectInspector result = cachedLazySimpleListObjectInspector.get(signature);
+        if (result == null) {
+            result = new LazyListObjectInspector(listElementObjectInspector, separator, nullSequence, escaped,
+                    escapeChar);
+            cachedLazySimpleListObjectInspector.put(signature, result);
+        }
+        return result;
+    }
+
+    static ConcurrentHashMap<ArrayList<Object>, LazyMapObjectInspector> cachedLazySimpleMapObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyMapObjectInspector>();
+
+    public static LazyMapObjectInspector getLazySimpleMapObjectInspector(ObjectInspector mapKeyObjectInspector,
+            ObjectInspector mapValueObjectInspector, byte itemSeparator, byte keyValueSeparator, Text nullSequence,
+            boolean escaped, byte escapeChar) {
+        ArrayList<Object> signature = new ArrayList<Object>();
+        signature.add(mapKeyObjectInspector);
+        signature.add(mapValueObjectInspector);
+        signature.add(Byte.valueOf(itemSeparator));
+        signature.add(Byte.valueOf(keyValueSeparator));
+        signature.add(nullSequence.toString());
+        signature.add(Boolean.valueOf(escaped));
+        signature.add(Byte.valueOf(escapeChar));
+        LazyMapObjectInspector result = cachedLazySimpleMapObjectInspector.get(signature);
+        if (result == null) {
+            result = new LazyMapObjectInspector(mapKeyObjectInspector, mapValueObjectInspector, itemSeparator,
+                    keyValueSeparator, nullSequence, escaped, escapeChar);
+            cachedLazySimpleMapObjectInspector.put(signature, result);
+        }
+        return result;
+    }
+
+    static ConcurrentHashMap<List<Object>, LazyUnionObjectInspector> cachedLazyUnionObjectInspector = new ConcurrentHashMap<List<Object>, LazyUnionObjectInspector>();
+
+    public static LazyUnionObjectInspector getLazyUnionObjectInspector(List<ObjectInspector> ois, byte separator,
+            Text nullSequence, boolean escaped, byte escapeChar) {
+        List<Object> signature = new ArrayList<Object>();
+        signature.add(ois);
+        signature.add(Byte.valueOf(separator));
+        signature.add(nullSequence.toString());
+        signature.add(Boolean.valueOf(escaped));
+        signature.add(Byte.valueOf(escapeChar));
+        LazyUnionObjectInspector result = cachedLazyUnionObjectInspector.get(signature);
+        if (result == null) {
+            result = new LazyUnionObjectInspector(ois, separator, nullSequence, escaped, escapeChar);
+            cachedLazyUnionObjectInspector.put(signature, result);
+        }
+        return result;
+    }
+
+    private LazyObjectInspectorFactory() {
+        // prevent instantiation
+    }
+}
\ No newline at end of file
diff --git a/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
new file mode 100644
index 0000000..95b999e
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2.typeinfo;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+
+/**
+ * TypeInfoFactory can be used to create the TypeInfo object for any type.
+ * TypeInfo objects are all read-only so we can reuse them easily.
+ * TypeInfoFactory has an internal cache to make sure we don't create two
+ * TypeInfo objects that represent the same type.
+ */
+public final class TypeInfoFactory {
+
+    static ConcurrentHashMap<String, TypeInfo> cachedPrimitiveTypeInfo = new ConcurrentHashMap<String, TypeInfo>();
+
+    private TypeInfoFactory() {
+        // prevent instantiation
+    }
+
+    public static TypeInfo getPrimitiveTypeInfo(String typeName) {
+        if (null == PrimitiveObjectInspectorUtils.getTypeEntryFromTypeName(typeName)) {
+            throw new RuntimeException("Cannot getPrimitiveTypeInfo for " + typeName);
+        }
+        TypeInfo result = cachedPrimitiveTypeInfo.get(typeName);
+        if (result == null) {
+            result = new PrimitiveTypeInfo(typeName);
+            cachedPrimitiveTypeInfo.put(typeName, result);
+        }
+        return result;
+    }
+
+    public static final TypeInfo voidTypeInfo = getPrimitiveTypeInfo(Constants.VOID_TYPE_NAME);
+    public static final TypeInfo booleanTypeInfo = getPrimitiveTypeInfo(Constants.BOOLEAN_TYPE_NAME);
+    public static final TypeInfo intTypeInfo = getPrimitiveTypeInfo(Constants.INT_TYPE_NAME);
+    public static final TypeInfo longTypeInfo = getPrimitiveTypeInfo(Constants.BIGINT_TYPE_NAME);
+    public static final TypeInfo stringTypeInfo = getPrimitiveTypeInfo(Constants.STRING_TYPE_NAME);
+    public static final TypeInfo floatTypeInfo = getPrimitiveTypeInfo(Constants.FLOAT_TYPE_NAME);
+    public static final TypeInfo doubleTypeInfo = getPrimitiveTypeInfo(Constants.DOUBLE_TYPE_NAME);
+    public static final TypeInfo byteTypeInfo = getPrimitiveTypeInfo(Constants.TINYINT_TYPE_NAME);
+    public static final TypeInfo shortTypeInfo = getPrimitiveTypeInfo(Constants.SMALLINT_TYPE_NAME);
+
+    public static final TypeInfo unknownTypeInfo = getPrimitiveTypeInfo("unknown");
+
+    public static TypeInfo getPrimitiveTypeInfoFromPrimitiveWritable(Class<?> clazz) {
+        String typeName = PrimitiveObjectInspectorUtils.getTypeNameFromPrimitiveWritable(clazz);
+        if (typeName == null) {
+            throw new RuntimeException("Internal error: Cannot get typeName for " + clazz);
+        }
+        return getPrimitiveTypeInfo(typeName);
+    }
+
+    public static TypeInfo getPrimitiveTypeInfoFromJavaPrimitive(Class<?> clazz) {
+        return getPrimitiveTypeInfo(PrimitiveObjectInspectorUtils.getTypeNameFromPrimitiveJava(clazz));
+    }
+
+    static ConcurrentHashMap<ArrayList<List<?>>, TypeInfo> cachedStructTypeInfo = new ConcurrentHashMap<ArrayList<List<?>>, TypeInfo>();
+
+    public static TypeInfo getStructTypeInfo(List<String> names, List<TypeInfo> typeInfos) {
+        ArrayList<List<?>> signature = new ArrayList<List<?>>(2);
+        signature.add(names);
+        signature.add(typeInfos);
+        TypeInfo result = cachedStructTypeInfo.get(signature);
+        if (result == null) {
+            result = new StructTypeInfo(names, typeInfos);
+            cachedStructTypeInfo.put(signature, result);
+        }
+        return result;
+    }
+
+    static ConcurrentHashMap<List<?>, TypeInfo> cachedUnionTypeInfo = new ConcurrentHashMap<List<?>, TypeInfo>();
+
+    public static TypeInfo getUnionTypeInfo(List<TypeInfo> typeInfos) {
+        TypeInfo result = cachedUnionTypeInfo.get(typeInfos);
+        if (result == null) {
+            result = new UnionTypeInfo(typeInfos);
+            cachedUnionTypeInfo.put(typeInfos, result);
+        }
+        return result;
+    }
+
+    static ConcurrentHashMap<TypeInfo, TypeInfo> cachedListTypeInfo = new ConcurrentHashMap<TypeInfo, TypeInfo>();
+
+    public static TypeInfo getListTypeInfo(TypeInfo elementTypeInfo) {
+        TypeInfo result = cachedListTypeInfo.get(elementTypeInfo);
+        if (result == null) {
+            result = new ListTypeInfo(elementTypeInfo);
+            cachedListTypeInfo.put(elementTypeInfo, result);
+        }
+        return result;
+    }
+
+    static ConcurrentHashMap<ArrayList<TypeInfo>, TypeInfo> cachedMapTypeInfo = new ConcurrentHashMap<ArrayList<TypeInfo>, TypeInfo>();
+
+    public static TypeInfo getMapTypeInfo(TypeInfo keyTypeInfo, TypeInfo valueTypeInfo) {
+        ArrayList<TypeInfo> signature = new ArrayList<TypeInfo>(2);
+        signature.add(keyTypeInfo);
+        signature.add(valueTypeInfo);
+        TypeInfo result = cachedMapTypeInfo.get(signature);
+        if (result == null) {
+            result = new MapTypeInfo(keyTypeInfo, valueTypeInfo);
+            cachedMapTypeInfo.put(signature, result);
+        }
+        return result;
+    }
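+
+    // Example (illustrative): repeated calls such as
+    //   TypeInfoFactory.getMapTypeInfo(stringTypeInfo, intTypeInfo)
+    // return the same cached TypeInfo instance for a given key/value pair.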
+
+}
\ No newline at end of file
diff --git a/hivesterix/hivesterix-dist/src/main/resources/conf/cluster.properties b/hivesterix/hivesterix-dist/src/main/resources/conf/cluster.properties
new file mode 100644
index 0000000..2d2401a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/conf/cluster.properties
@@ -0,0 +1,37 @@
+#The CC port for Hyracks clients
+CC_CLIENTPORT=3099
+
+#The CC port for Hyracks cluster management
+CC_CLUSTERPORT=1099
+
+#The directory of hyracks binaries
+HYRACKS_HOME=../../../../hyracks
+
+#The tmp directory for cc to install jars
+CCTMP_DIR=/tmp/t1
+
+#The tmp directory for nc to install jars
+NCTMP_DIR=/tmp/t2
+
+#The directory to put cc logs
+CCLOGS_DIR=$CCTMP_DIR/logs
+
+#The directory to put nc logs
+NCLOGS_DIR=$NCTMP_DIR/logs
+
+#Comma separated I/O directories for the spilling of external sort
+IO_DIRS="/tmp/t3,/tmp/t4"
+
+#The JAVA_HOME
+JAVA_HOME=$JAVA_HOME
+
+#The frame size of the internal dataflow engine
+FRAME_SIZE=65536
+
+#CC JAVA_OPTS
+CCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7001,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+# Yourkit option: -agentpath:/grid/0/dev/vborkar/tools/yjp-10.0.4/bin/linux-x86-64/libyjpagent.so=port=20001"
+
+#NC JAVA_OPTS
+NCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7002,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+
diff --git a/hivesterix/hivesterix-dist/src/main/resources/conf/configuration.xsl b/hivesterix/hivesterix-dist/src/main/resources/conf/configuration.xsl
new file mode 100644
index 0000000..377cdbe
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/conf/configuration.xsl
@@ -0,0 +1,24 @@
+<?xml version="1.0"?>
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
+<xsl:output method="html"/>
+<xsl:template match="configuration">
+<html>
+<body>
+<table border="1">
+<tr>
+ <td>name</td>
+ <td>value</td>
+ <td>description</td>
+</tr>
+<xsl:for-each select="property">
+<tr>
+  <td><a name="{name}"><xsl:value-of select="name"/></a></td>
+  <td><xsl:value-of select="value"/></td>
+  <td><xsl:value-of select="description"/></td>
+</tr>
+</xsl:for-each>
+</table>
+</body>
+</html>
+</xsl:template>
+</xsl:stylesheet>
diff --git a/hivesterix/hivesterix-dist/src/main/resources/conf/debugnc.properties b/hivesterix/hivesterix-dist/src/main/resources/conf/debugnc.properties
new file mode 100755
index 0000000..27afa26
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/conf/debugnc.properties
@@ -0,0 +1,12 @@
+#The tmp directory for nc to install jars
+NCTMP_DIR2=/tmp/t-1
+
+#The directory to put nc logs
+NCLOGS_DIR2=$NCTMP_DIR2/logs
+
+#Comma separated I/O directories for the spilling of external sort
+IO_DIRS2="/tmp/t-2,/tmp/t-3"
+
+#NC JAVA_OPTS
+NCJAVA_OPTS2="-Xdebug -Xrunjdwp:transport=dt_socket,address=7003,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+
diff --git a/hivesterix/hivesterix-dist/src/main/resources/conf/hive-default.xml b/hivesterix/hivesterix-dist/src/main/resources/conf/hive-default.xml
new file mode 100644
index 0000000..587eede
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/conf/hive-default.xml
@@ -0,0 +1,758 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+	<!-- Hive Configuration can either be stored in this file or in the hadoop 
+		configuration files -->
+	<!-- that are implied by Hadoop setup variables. -->
+	<!-- Aside from Hadoop setup variables - this file is provided as a convenience 
+		so that Hive -->
+	<!-- users do not have to edit hadoop configuration files (that may be managed 
+		as a centralized -->
+	<!-- resource). -->
+
+	<!-- Hive Execution Parameters -->
+	<property>
+		<name>mapred.reduce.tasks</name>
+		<value>-1</value>
+		<description>The default number of reduce tasks per job. Typically set
+			to a prime close to the number of available hosts. Ignored when
+			mapred.job.tracker is "local". Hadoop sets this to 1 by default,
+			whereas Hive uses -1 as its default value.
+			By setting this property to -1, Hive will automatically figure out
+			the number of reducers.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hyracks.connectorpolicy</name>
+		<value>PIPELINING</value>
+	</property>
+
+	<property>
+		<name>hive.hyracks.parrallelism</name>
+		<value>4</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.groupby.external</name>
+		<value>true</value>
+	</property>
+	
+	<property>
+		<name>hive.algebricks.groupby.external.memory</name>
+		<value>33554432</value>
+	</property>
+	
+	<property>
+		<name>hive.algebricks.sort.memory</name>
+		<value>33554432</value>
+	</property>
+
+	<property>
+		<name>hive.exec.reducers.bytes.per.reducer</name>
+		<value>1000000000</value>
+		<description>Size per reducer. The default is 1G, i.e. if the input
+			size is 10G, it will use 10 reducers.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.reducers.max</name>
+		<value>999</value>
+		<description>Maximum number of reducers that will be used. If the one
+			specified in the configuration parameter mapred.reduce.tasks is
+			negative, Hive will use this as the maximum number of reducers when
+			automatically determining the number of reducers.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.scratchdir</name>
+		<value>/hive-${user.name}</value>
+		<description>Scratch space for Hive jobs</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode</name>
+		<value>false</value>
+		<description>whether hive is running in test mode. If yes, it turns on
+			sampling and prefixes the output table name</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode.prefix</name>
+		<value>test_</value>
+		<description>if hive is running in test mode, prefixes the output
+			table by this string</description>
+	</property>
+
+	<!-- If the input table is not bucketed, the denominator of the tablesample 
+		is determined by the parameter below -->
+	<!-- For example, the following query: -->
+	<!-- INSERT OVERWRITE TABLE dest -->
+	<!-- SELECT col1 from src -->
+	<!-- would be converted to -->
+	<!-- INSERT OVERWRITE TABLE test_dest -->
+	<!-- SELECT col1 from src TABLESAMPLE (BUCKET 1 out of 32 on rand(1)) -->
+	<property>
+		<name>hive.test.mode.samplefreq</name>
+		<value>32</value>
+		<description>if hive is running in test mode and table is not
+			bucketed, sampling frequency</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode.nosamplelist</name>
+		<value></value>
+		<description>if hive is running in test mode, don't sample the above
+			comma-separated list of tables</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.local</name>
+		<value>true</value>
+		<description>controls whether to connect to a remote metastore server
+			or open a new metastore server in the Hive Client JVM</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionURL</name>
+		<value>jdbc:derby:;databaseName=metastore_db;create=true</value>
+		<description>JDBC connect string for a JDBC metastore</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionDriverName</name>
+		<value>org.apache.derby.jdbc.EmbeddedDriver</value>
+		<description>Driver class name for a JDBC metastore</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.PersistenceManagerFactoryClass</name>
+		<value>org.datanucleus.jdo.JDOPersistenceManagerFactory</value>
+		<description>class implementing the jdo persistence</description>
+	</property>
+
+	<property>
+		<name>datanucleus.connectionPoolingType</name>
+		<value>DBCP</value>
+		<description>Uses a DBCP connection pool for JDBC metastore
+		</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.DetachAllOnCommit</name>
+		<value>true</value>
+		<description>detaches all objects from session so that they can be
+			used after transaction is committed</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.NonTransactionalRead</name>
+		<value>true</value>
+		<description>reads outside of transactions</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionUserName</name>
+		<value>APP</value>
+		<description>username to use against metastore database</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionPassword</name>
+		<value>mine</value>
+		<description>password to use against metastore database</description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateTables</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema </description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateColumns</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema </description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateConstraints</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema </description>
+	</property>
+
+	<property>
+		<name>datanucleus.storeManagerType</name>
+		<value>rdbms</value>
+		<description>metadata store type</description>
+	</property>
+
+	<property>
+		<name>datanucleus.autoCreateSchema</name>
+		<value>true</value>
+		<description>creates necessary schema on startup if one doesn't
+			exist. Set this to false after creating it once</description>
+	</property>
+
+	<property>
+		<name>datanucleus.autoStartMechanismMode</name>
+		<value>checked</value>
+		<description>throw exception if metadata tables are incorrect
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.transactionIsolation</name>
+		<value>read-committed</value>
+		<description>Default transaction isolation level for identity
+			generation. </description>
+	</property>
+
+	<property>
+		<name>datanucleus.cache.level2</name>
+		<value>false</value>
+		<description>Use a level 2 cache. Turn this off if metadata is changed
+			independently of hive metastore server</description>
+	</property>
+
+	<property>
+		<name>datanucleus.cache.level2.type</name>
+		<value>SOFT</value>
+		<description>SOFT=soft reference based cache, WEAK=weak reference
+			based cache.</description>
+	</property>
+
+	<property>
+		<name>datanucleus.identifierFactory</name>
+		<value>datanucleus</value>
+		<description>Name of the identifier factory to use when generating
+			table/column names etc. 'datanucleus' is used for backward
+			compatibility</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.warehouse.dir</name>
+		<value>/user/hivesterix</value>
+		<description>location of default database for the warehouse
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.connect.retries</name>
+		<value>5</value>
+		<description>Number of retries while opening a connection to metastore
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.rawstore.impl</name>
+		<value>org.apache.hadoop.hive.metastore.ObjectStore</value>
+		<description>Name of the class that implements
+			org.apache.hadoop.hive.metastore.rawstore interface. This class is
+			used for storage and retrieval of raw metadata objects such as
+			tables and databases</description>
+	</property>
+
+	<property>
+		<name>hive.default.fileformat</name>
+		<value>TextFile</value>
+		<description>Default file format for CREATE TABLE statement. Options
+			are TextFile and SequenceFile. Users can explicitly say CREATE TABLE
+			... STORED AS &lt;TEXTFILE|SEQUENCEFILE&gt; to override</description>
+	</property>
+
+	<property>
+		<name>hive.fileformat.check</name>
+		<value>true</value>
+		<description>Whether to check file format or not when loading data
+			files</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr</name>
+		<value>true</value>
+		<description>Whether to use map-side aggregation in Hive Group By
+			queries</description>
+	</property>
+
+	<property>
+		<name>hive.groupby.skewindata</name>
+		<value>false</value>
+		<description>Whether there is skew in data to optimize group by
+			queries</description>
+	</property>
+
+	<property>
+		<name>hive.groupby.mapaggr.checkinterval</name>
+		<value>100000</value>
+		<description>Number of rows after which a size check of the grouping
+			keys/aggregation classes is performed</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.local.mem</name>
+		<value>0</value>
+		<description>For local mode, memory of the mappers/reducers
+		</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr.hash.percentmemory</name>
+		<value>0.5</value>
+		<description>Portion of total memory to be used by map-side group
+			aggregation hash table</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr.hash.min.reduction</name>
+		<value>0.5</value>
+		<description>Hash aggregation will be turned off if the ratio between
+			hash table size and input rows is bigger than this number. Set to 1
+			to make sure hash aggregation is never turned off.</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.cp</name>
+		<value>true</value>
+		<description>Whether to enable column pruner</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.ppd</name>
+		<value>true</value>
+		<description>Whether to enable predicate pushdown</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.pruner</name>
+		<value>true</value>
+		<description>Whether to enable the new partition pruner which depends
+			on predicate pushdown. If this is disabled,
+			the old partition pruner which is based on AST will be enabled.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.groupby</name>
+		<value>true</value>
+		<description>Whether to enable the bucketed group by from bucketed
+			partitions/tables.</description>
+	</property>
+
+	<property>
+		<name>hive.join.emit.interval</name>
+		<value>1000</value>
+		<description>How many rows in the right-most join operand Hive should
+			buffer before emitting the join result. </description>
+	</property>
+
+	<property>
+		<name>hive.join.cache.size</name>
+		<value>25000</value>
+		<description>How many rows in the joining tables (except the streaming
+			table) should be cached in memory. </description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.bucket.cache.size</name>
+		<value>100</value>
+		<description>How many values for each key in the map-joined table
+			should be cached in memory. </description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.maxsize</name>
+		<value>100000</value>
+		<description>Maximum # of rows of the small table that can be handled
+			by map-side join. If the size is reached and hive.task.progress is
+			set, a fatal error counter is set and the job will be killed.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.cache.numrows</name>
+		<value>25000</value>
+		<description>How many rows should be cached by jdbm for map join.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.skewjoin</name>
+		<value>false</value>
+		<description>Whether to enable skew join optimization. </description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.key</name>
+		<value>100000</value>
+		<description>Determine if we get a skew key in join. If we see more
+			than the specified number of rows with the same key in the join
+			operator, we treat the key as a skew join key. </description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.mapjoin.map.tasks</name>
+		<value>10000</value>
+		<description>Determine the number of map tasks used in the follow-up
+			map join job for a skew join. It should be used together with
+			hive.skewjoin.mapjoin.min.split to perform fine-grained control.</description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.mapjoin.min.split</name>
+		<value>33554432</value>
+		<description>Determine the maximum number of map tasks used in the
+			follow-up map join job for a skew join by specifying the minimum
+			split size. It should be used together with
+			hive.skewjoin.mapjoin.map.tasks to perform fine-grained control.</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.mode</name>
+		<value>nonstrict</value>
+		<description>The mode in which the hive operations are being
+			performed. In strict mode, some risky queries are not allowed to run
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.script.maxerrsize</name>
+		<value>100000</value>
+		<description>Maximum number of bytes a script is allowed to emit to
+			standard error (per map-reduce task). This prevents runaway scripts
+			from filling log partitions to capacity </description>
+	</property>
+
+	<property>
+		<name>hive.exec.script.allow.partial.consumption</name>
+		<value>false</value>
+		<description> When enabled, this option allows a user script to exit
+			successfully without consuming all the data from the standard input.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.script.operator.id.env.var</name>
+		<value>HIVE_SCRIPT_OPERATOR_ID</value>
+		<description> Name of the environment variable that holds the unique
+			script operator ID in the user's transform function (the custom
+			mapper/reducer that the user has specified in the query)
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.compress.output</name>
+		<value>false</value>
+		<description> This controls whether the final outputs of a query (to a
+			local/hdfs file or a hive table) are compressed. The compression codec
+			and other options are determined from hadoop config variables
+			mapred.output.compress* </description>
+	</property>
+
+	<property>
+		<name>hive.exec.compress.intermediate</name>
+		<value>false</value>
+		<description> This controls whether intermediate files produced by
+			hive between multiple map-reduce jobs are compressed. The compression
+			codec and other options are determined from hadoop config variables
+			mapred.output.compress* </description>
+	</property>
+
+	<property>
+		<name>hive.exec.parallel</name>
+		<value>false</value>
+		<description>Whether to execute jobs in parallel</description>
+	</property>
+
+	<property>
+		<name>hive.exec.parallel.thread.number</name>
+		<value>8</value>
+		<description>How many jobs at most can be executed in parallel
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.war.file</name>
+		<value>lib\hive-hwi-0.7.0.war</value>
+		<description>This sets the path to the HWI war file, relative to
+			${HIVE_HOME}. </description>
+	</property>
+
+	<property>
+		<name>hive.hwi.listen.host</name>
+		<value>0.0.0.0</value>
+		<description>This is the host address the Hive Web Interface will
+			listen on</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.listen.port</name>
+		<value>9999</value>
+		<description>This is the port the Hive Web Interface will listen on
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.pre.hooks</name>
+		<value></value>
+		<description>Pre Execute Hook for Tests</description>
+	</property>
+
+	<property>
+		<name>hive.merge.mapfiles</name>
+		<value>true</value>
+		<description>Merge small files at the end of a map-only job
+		</description>
+	</property>
+
+	<property>
+		<name>hive.merge.mapredfiles</name>
+		<value>false</value>
+		<description>Merge small files at the end of a map-reduce job
+		</description>
+	</property>
+
+	<property>
+		<name>hive.heartbeat.interval</name>
+		<value>1000</value>
+		<description>Send a heartbeat after this interval - used by mapjoin
+			and filter operators</description>
+	</property>
+
+	<property>
+		<name>hive.merge.size.per.task</name>
+		<value>256000000</value>
+		<description>Size of merged files at the end of the job</description>
+	</property>
+
+	<property>
+		<name>hive.merge.size.smallfiles.avgsize</name>
+		<value>16000000</value>
+		<description>When the average output file size of a job is less than
+			this number, Hive will start an additional map-reduce job to merge
+			the output files into bigger files. This is only done for map-only
+			jobs if hive.merge.mapfiles is true, and for map-reduce jobs if
+			hive.merge.mapredfiles is true.</description>
+	</property>
+
+	<property>
+		<name>hive.script.auto.progress</name>
+		<value>false</value>
+		<description>Whether Hive Transform/Map/Reduce Clause should
+			automatically send progress information to TaskTracker to avoid the
+			task getting killed because of inactivity. Hive sends progress
+			information when the script is outputting to stderr. This option
+			removes the need to periodically produce stderr messages, but users
+			should be cautious because this may prevent infinite loops in the
+			scripts from being killed by TaskTracker.  </description>
+	</property>
+
+	<property>
+		<name>hive.script.serde</name>
+		<value>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</value>
+		<description>The default serde for transmitting input data to and
+			reading output data from the user scripts. </description>
+	</property>
+
+	<property>
+		<name>hive.script.recordreader</name>
+		<value>org.apache.hadoop.hive.ql.exec.TextRecordReader</value>
+		<description>The default record reader for reading data from the user
+			scripts. </description>
+	</property>
+
+	<property>
+		<name>hive.script.recordwriter</name>
+		<value>org.apache.hadoop.hive.ql.exec.TextRecordWriter</value>
+		<description>The default record writer for writing data to the user
+			scripts. </description>
+	</property>
+
+	<property>
+		<name>hive.input.format</name>
+		<value>org.apache.hadoop.hive.ql.io.HiveInputFormat</value>
+		<description>The default input format, if it is not specified, the
+			system assigns it. It is set to HiveInputFormat for hadoop versions
+			17, 18 and 19, whereas it is set to CombinedHiveInputFormat for
+			hadoop 20. The user can always overwrite it - if there is a bug in
+			CombinedHiveInputFormat, it can always be manually set to
+			HiveInputFormat. </description>
+	</property>
+
+	<property>
+		<name>hive.udtf.auto.progress</name>
+		<value>false</value>
+		<description>Whether Hive should automatically send progress
+			information to TaskTracker when using UDTF's to prevent the task
+			getting killed because of inactivity. Users should be cautious
+			because this may prevent TaskTracker from killing tasks with infinite
+			loops.  </description>
+	</property>
+
+	<property>
+		<name>hive.mapred.reduce.tasks.speculative.execution</name>
+		<value>true</value>
+		<description>Whether speculative execution for reducers should be
+			turned on. </description>
+	</property>
+
+	<property>
+		<name>hive.exec.counters.pull.interval</name>
+		<value>1000</value>
+		<description>The interval with which to poll the JobTracker for the
+			counters of the running job. The smaller it is, the more load there
+			will be on the JobTracker; the higher it is, the less granular the
+			counter updates will be.</description>
+	</property>
+
+	<property>
+		<name>hive.enforce.bucketing</name>
+		<value>false</value>
+		<description>Whether bucketing is enforced. If true, while inserting
+			into the table, bucketing is enforced. </description>
+	</property>
+
+	<property>
+		<name>hive.enforce.sorting</name>
+		<value>false</value>
+		<description>Whether sorting is enforced. If true, while inserting
+			into the table, sorting is enforced. </description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.connection.url.hook</name>
+		<value></value>
+		<description>Name of the hook to use for retrieving the JDO connection
+			URL. If empty, the value in javax.jdo.option.ConnectionURL is used
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.retry.attempts</name>
+		<value>1</value>
+		<description>The number of times to retry a metastore call if there
+			is a connection error</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.retry.interval</name>
+		<value>1000</value>
+		<description>The number of milliseconds between metastore retry
+			attempts</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.min.threads</name>
+		<value>200</value>
+		<description>Minimum number of worker threads in the Thrift server's
+			pool.</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.max.threads</name>
+		<value>100000</value>
+		<description>Maximum number of worker threads in the Thrift server's
+			pool.</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.tcp.keepalive</name>
+		<value>true</value>
+		<description>Whether to enable TCP keepalive for the metastore server.
+			Keepalive will prevent accumulation of half-open connections.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.reducededuplication</name>
+		<value>true</value>
+		<description>Remove extra map-reduce jobs if the data is already
+			clustered by the same key which needs to be used again. This should
+			always be set to true. Since it is a new feature, it has been made
+			configurable.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.dynamic.partition</name>
+		<value>false</value>
+		<description>Whether or not to allow dynamic partitions in DML/DDL.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.dynamic.partition.mode</name>
+		<value>strict</value>
+		<description>In strict mode, the user must specify at least one static
+			partition in case the user accidentally overwrites all partitions.
+		</description>
+	</property>
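+
+	<!-- For example (illustrative, added editorially): in strict mode -->
+	<!-- INSERT OVERWRITE TABLE dest PARTITION (ds='2010-01-01', hr) SELECT ... -->
+	<!-- is allowed because ds is static, while PARTITION (ds, hr) with no -->
+	<!-- static column would be rejected. -->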
+
+	<property>
+		<name>hive.exec.max.dynamic.partitions</name>
+		<value>1000</value>
+		<description>Maximum number of dynamic partitions allowed to be
+			created in total.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.max.dynamic.partitions.pernode</name>
+		<value>100</value>
+		<description>Maximum number of dynamic partitions allowed to be
+			created in each mapper/reducer node.</description>
+	</property>
+
+	<property>
+		<name>hive.default.partition.name</name>
+		<value>__HIVE_DEFAULT_PARTITION__</value>
+		<description>The default partition name in case the dynamic partition
+			column value is null/empty string or any other value that cannot be
+			escaped. This value must not contain any special character used in
+			HDFS URI (e.g., ':', '%', '/' etc). The user has to be aware that the
+			dynamic partition value should not contain this value to avoid
+			confusions.</description>
+	</property>
+
+	<property>
+		<name>fs.har.impl</name>
+		<value>org.apache.hadoop.hive.shims.HiveHarFileSystem</value>
+		<description>The implementation for accessing Hadoop Archives. Note
+			that this won't be applicable to Hadoop versions less than 0.20
+		</description>
+	</property>
+
+	<property>
+		<name>hive.archive.enabled</name>
+		<value>false</value>
+		<description>Whether archiving operations are permitted</description>
+	</property>
+
+	<property>
+		<name>hive.archive.har.parentdir.settable</name>
+		<value>false</value>
+		<description>In new Hadoop versions, the parent directory must be set
+			while creating a HAR. Because this functionality is hard to detect
+			with just version numbers, this conf var needs to be set manually.</description>
+	</property>
+
+	<!-- HBase Storage Handler Parameters -->
+
+	<property>
+		<name>hive.hbase.wal.enabled</name>
+		<value>true</value>
+		<description>Whether writes to HBase should be forced to the
+			write-ahead log. Disabling this improves HBase write performance at
+			the risk of lost writes in case of a crash.</description>
+	</property>
+
+</configuration>
diff --git a/hivesterix/hivesterix-dist/src/main/resources/conf/hive-log4j.properties b/hivesterix/hivesterix-dist/src/main/resources/conf/hive-log4j.properties
new file mode 100644
index 0000000..784a274
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/conf/hive-log4j.properties
@@ -0,0 +1,58 @@
+#------------------------------------------------------------------------------
+#
+#  The following properties set the logging levels and log appender.  The
+#  log4j.rootCategory variable defines the default log level and one or more
+#  appenders.  For the console, use 'S'.  For the daily rolling file, use 'R'.
+#  For an HTML formatted log, use 'H'.
+#
+#  To override the default (rootCategory) log level, define a property of the
+#  form (see below for available values):
+#
+#        log4j.logger. =
+#
+#    Available logger names:
+#      TODO
+#
+#    Possible Log Levels:
+#      FATAL, ERROR, WARN, INFO, DEBUG
+#
+#------------------------------------------------------------------------------
+log4j.rootCategory=INFO, S
+
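+# Note (editorial): only the console appender S is active via rootCategory
+# above; the R and H appenders defined below take effect only when attached
+# to a logger or added to rootCategory.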
+log4j.logger.com.dappit.Dapper.parser=ERROR
+log4j.logger.org.w3c.tidy=FATAL
+
+#------------------------------------------------------------------------------
+#
+#  The following properties configure the console (stdout) appender.
+#  See http://logging.apache.org/log4j/docs/api/index.html for details.
+#
+#------------------------------------------------------------------------------
+log4j.appender.S = org.apache.log4j.ConsoleAppender
+log4j.appender.S.layout = org.apache.log4j.PatternLayout
+log4j.appender.S.layout.ConversionPattern = %d{yyyy-MM-dd HH:mm:ss} %c{1} [%p] %m%n
+
+#------------------------------------------------------------------------------
+#
+#  The following properties configure the Daily Rolling File appender.
+#  See http://logging.apache.org/log4j/docs/api/index.html for details.
+#
+#------------------------------------------------------------------------------
+log4j.appender.R = org.apache.log4j.DailyRollingFileAppender
+log4j.appender.R.File = logs/bensApps.log
+log4j.appender.R.Append = true
+log4j.appender.R.DatePattern = '.'yyyy-MM-dd
+log4j.appender.R.layout = org.apache.log4j.PatternLayout
+log4j.appender.R.layout.ConversionPattern = %d{yyyy-MM-dd HH:mm:ss} %c{1} [%p] %m%n
+
+#------------------------------------------------------------------------------
+#
+#  The following properties configure the Rolling File appender in HTML.
+#  See http://logging.apache.org/log4j/docs/api/index.html for details.
+#
+#------------------------------------------------------------------------------
+log4j.appender.H = org.apache.log4j.RollingFileAppender
+log4j.appender.H.File = logs/bensApps.html
+log4j.appender.H.MaxFileSize = 100KB
+log4j.appender.H.Append = false
+log4j.appender.H.layout = org.apache.log4j.HTMLLayout
diff --git a/hivesterix/hivesterix-dist/src/main/resources/conf/master b/hivesterix/hivesterix-dist/src/main/resources/conf/master
new file mode 100644
index 0000000..2fbb50c
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/conf/master
@@ -0,0 +1 @@
+localhost
diff --git a/hivesterix/hivesterix-dist/src/main/resources/conf/slaves b/hivesterix/hivesterix-dist/src/main/resources/conf/slaves
new file mode 100644
index 0000000..2fbb50c
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/conf/slaves
@@ -0,0 +1 @@
+localhost
diff --git a/hivesterix/hivesterix-dist/src/main/resources/conf/topology-template.xml b/hivesterix/hivesterix-dist/src/main/resources/conf/topology-template.xml
new file mode 100755
index 0000000..4710706
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/conf/topology-template.xml
@@ -0,0 +1,7 @@
+<cluster-topology>
+    <network-switch name="Global">
+        <network-switch name="local">
+            <terminal name="127.0.0.1"/>
+        </network-switch>
+    </network-switch>
+</cluster-topology>
\ No newline at end of file
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/cli.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/cli.sh
new file mode 100644
index 0000000..914aae3
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/cli.sh
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=cli
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+cli () {
+  CLASS=org.apache.hadoop.hive.cli.CliDriver
+  execHiveCmd $CLASS "$@"
+}
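+
+# Example (illustrative): "./hive -e 'SHOW TABLES;'" is dispatched here via
+# execHiveCmd, since cli is the default service.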
+
+cli_help () {
+  CLASS=org.apache.hadoop.hive.cli.CliDriver
+  execHiveCmd $CLASS "--help"
+} 
+
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/help.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/help.sh
new file mode 100644
index 0000000..432859a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/help.sh
@@ -0,0 +1,36 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=help
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+help() {
+  echo "Usage ./hive <parameters> --service serviceName <service parameters>"
+  echo "Service List: $SERVICE_LIST"
+  echo "Parameters parsed:"
+  echo "  --auxpath : Auxillary jars "
+  echo "  --config : Hive configuration directory"
+  echo "  --service : Starts specific service/component. cli is default"
+  echo "Parameters used:"
+  echo "  HADOOP_HOME : Hadoop install directory"
+  echo "  HIVE_OPT : Hive options"
+  echo "For help on a particular service:"
+  echo "  ./hive --service serviceName --help"
+}
+
+help_help(){
+  help
+}
+
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/hiveserver.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/hiveserver.sh
new file mode 100644
index 0000000..b5edce4
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/hiveserver.sh
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=hiveserver
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+hiveserver() {
+  echo "Starting Hive Thrift Server"
+  CLASS=org.apache.hadoop.hive.service.HiveServer
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+  JAR=${HIVE_LIB}/hive-service-*.jar
+
+  # hadoop 20 or newer - skip the aux_jars option and hiveconf
+  exec $HADOOP jar $JAR $CLASS $HIVE_PORT "$@"
+}
+
+hiveserver_help() {
+  echo "usage HIVE_PORT=xxxx ./hive --service hiveserver" 
+  echo "  HIVE_PORT : Specify the server port"
+}
+
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/hwi.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/hwi.sh
new file mode 100644
index 0000000..f9cd8ec
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/hwi.sh
@@ -0,0 +1,50 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=hwi
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+hwi() {
+
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+
+  CLASS=org.apache.hadoop.hive.hwi.HWIServer
+  # The ls hack forces the * to be expanded which is required because 
+  # System.getenv doesn't do globbing
+  export HWI_JAR_FILE=$(ls ${HIVE_LIB}/hive-hwi-*.jar)
+  export HWI_WAR_FILE=$(ls ${HIVE_LIB}/hive-hwi-*.war)
+
+  #hwi requires ant jars
+  if [ "$ANT_LIB" = "" ] ; then
+    ANT_LIB=/opt/ant/lib
+  fi
+  for f in ${ANT_LIB}/*.jar; do
+    if [[ ! -f $f ]]; then
+      continue;
+    fi
+    HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:$f
+  done
+
+  export HADOOP_CLASSPATH
+  
+  # hadoop 20 or newer - skip the aux_jars option and hiveconf
+  exec $HADOOP jar ${HWI_JAR_FILE} $CLASS $HIVE_OPTS "$@"
+}
+
+hwi_help(){
+  echo "Usage ANT_LIB=XXXX hive --service hwi"	
+}
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/jar.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/jar.sh
new file mode 100644
index 0000000..b52f9a7
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/jar.sh
@@ -0,0 +1,47 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=jar
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+jar () {
+  RUNJAR=$1
+  shift
+
+  RUNCLASS=$1
+  shift
+
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+
+  if [ -z "$RUNJAR" ] ; then
+    echo "RUNJAR not specified"
+    exit 3
+  fi
+
+  if [ -z "$RUNCLASS" ] ; then
+    echo "RUNCLASS not specified"
+    exit 3
+  fi
+
+  # hadoop 20 or newer - skip the aux_jars option and hiveconf
+  exec $HADOOP jar $RUNJAR $RUNCLASS $HIVE_OPTS "$@"
+}
+
+jar_help () {
+  echo "Used for applications that require Hadoop and Hive classpath and environment."
+  echo "./hive --service jar <yourjar> <yourclass> HIVE_OPTS <your_args>"
+}
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/lineage.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/lineage.sh
new file mode 100644
index 0000000..993bc8d
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/lineage.sh
@@ -0,0 +1,38 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=lineage
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+lineage () {
+  CLASS=org.apache.hadoop.hive.ql.tools.LineageInfo
+
+  # cli specific code
+  if [ ! -f ${HIVE_LIB}/hive-exec-*.jar ]; then
+    echo "Missing Hive exec Jar"
+    exit 3;
+  fi
+
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+
+  exec $HADOOP jar ${HIVE_LIB}/hive-exec-*.jar $CLASS  "$@"
+}
+
+lineage_help () {
+  echo "usage ./hive 'hql' "
+} 
+
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/metastore.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/metastore.sh
new file mode 100644
index 0000000..db15f6e
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/metastore.sh
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=metastore
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+metastore() {
+  echo "Starting Hive Metastore Server"
+  CLASS=org.apache.hadoop.hive.metastore.HiveMetaStore
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+  JAR=${HIVE_LIB}/hive-service-*.jar
+
+  # hadoop 20 or newer - skip the aux_jars option and hiveconf
+  exec $HADOOP jar $JAR $CLASS $METASTORE_PORT "$@"
+}
+
+metastore_help() {
+  echo "usage METASTORE_PORT=xxxx ./hive --service metastore"
+  echo "  METASTORE_PORT : Specify the metastore server port"
+}
+
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/rcfilecat.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/rcfilecat.sh
new file mode 100644
index 0000000..3a9264b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/rcfilecat.sh
@@ -0,0 +1,27 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+THISSERVICE=rcfilecat
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+rcfilecat () {
+  CLASS=org.apache.hadoop.hive.cli.RCFileCat
+  HIVE_OPTS=''
+  execHiveCmd $CLASS "$@"
+}
+
+rcfilecat_help () {
+  echo "usage ./hive rcfilecat [--start='startoffset'] [--length='len'] "
+} 
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/util/execHiveCmd.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/util/execHiveCmd.sh
new file mode 100644
index 0000000..167cc40
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/ext/util/execHiveCmd.sh
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+execHiveCmd () {
+  CLASS=$1;
+  shift;
+
+  # cli specific code
+  if [ ! -f ${HIVE_LIB}/hive-cli-*.jar ]; then
+    echo "Missing Hive CLI Jar"
+    exit 3;
+  fi
+
+  if $cygwin; then
+    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
+  fi
+
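+  # Note: the exec below replaces the current shell with the hadoop process,
+  # so nothing after it in this function runs.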
+  # hadoop 20 or newer - skip the aux_jars option. picked up from hiveconf
+  exec $HADOOP jar ${HIVE_LIB}/hive-cli-*.jar $CLASS $HIVE_OPTS "$@"
+}
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/getip.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/getip.sh
new file mode 100755
index 0000000..8c9ae76
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/getip.sh
@@ -0,0 +1,25 @@
+#get the OS
+OS_NAME=`uname -a|awk '{print $1}'`
+LINUX_OS='Linux'
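+# Strategy: on Linux probe eth0, then em1, then fall back to lo; on other
+# systems (e.g. Mac OS) probe en1, then fall back to lo0.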
+
+if [ $OS_NAME = $LINUX_OS ];
+then
+        #Get IP Address
+        IPADDR=`/sbin/ifconfig eth0 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+        if [ "$IPADDR" = "" ]
+        then
+                IPADDR=`/sbin/ifconfig em1 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+        fi
+        if [ "$IPADDR" = "" ]
+        then
+                IPADDR=`/sbin/ifconfig lo | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+        fi
+else
+        IPADDR=`/sbin/ifconfig en1 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+        if [ "$IPADDR" = "" ]
+        then
+                IPADDR=`/sbin/ifconfig lo0 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+        fi
+
+fi
+echo $IPADDR
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/hive b/hivesterix/hivesterix-dist/src/main/resources/scripts/hive
new file mode 100755
index 0000000..f98f340
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/hive
@@ -0,0 +1,213 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+cygwin=false
+case "`uname`" in
+   CYGWIN*) cygwin=true;;
+esac
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hive-config.sh
+
+SERVICE=""
+HELP=""
+while [ $# -gt 0 ]; do
+  case "$1" in
+    --service)
+      shift
+      SERVICE=$1
+      shift
+      ;;
+    --rcfilecat)
+      SERVICE=rcfilecat
+      shift
+      ;;
+    --help)
+      HELP=_help
+      shift
+      ;;
+    *)
+      break
+      ;;
+  esac
+done
+
+if [ "$SERVICE" = "" ] ; then
+  if [ "$HELP" = "_help" ] ; then
+    SERVICE="help"
+  else
+    SERVICE="cli"
+  fi
+fi
+
+if [ -f "${HIVE_CONF_DIR}/hive-env.sh" ]; then
+  . "${HIVE_CONF_DIR}/hive-env.sh"
+fi
+
+CLASSPATH="${HIVE_CONF_DIR}"
+
+HIVE_LIB=${HIVE_HOME}/lib
+
+# needed for execution
+if [ ! -f ${HIVE_LIB}/hive-exec-*.jar ]; then
+  echo "Missing Hive Execution Jar: ${HIVE_LIB}/hive-exec-*.jar"
+  exit 1;
+fi
+
+if [ ! -f ${HIVE_LIB}/hive-metastore-*.jar ]; then
+  echo "Missing Hive MetaStore Jar"
+  exit 2;
+fi
+
+# cli specific code
+if [ ! -f ${HIVE_LIB}/hive-cli-*.jar ]; then
+  echo "Missing Hive CLI Jar"
+  exit 3;
+fi
+
+CLASSPATH=${CLASSPATH}:${HIVE_LIB}/a-hive-path.jar
+
+for f in ${HIVE_LIB}/*.jar; do
+  CLASSPATH=${CLASSPATH}:$f;
+done
+
+# add the auxiliary jars such as serdes
+if [ -d "${HIVE_AUX_JARS_PATH}" ]; then
+  for f in ${HIVE_AUX_JARS_PATH}/*.jar; do
+    if [[ ! -f $f ]]; then
+        continue;
+    fi
+    if $cygwin; then
+	f=`cygpath -w "$f"`
+    fi
+    AUX_CLASSPATH=${AUX_CLASSPATH}:$f
+    if [ "${AUX_PARAM}" == "" ]; then
+        AUX_PARAM=file://$f
+    else
+        AUX_PARAM=${AUX_PARAM},file://$f;
+    fi
+  done
+elif [ "${HIVE_AUX_JARS_PATH}" != "" ]; then 
+  if $cygwin; then
+      HIVE_AUX_JARS_PATH=`echo $HIVE_AUX_JARS_PATH | sed 's/,/:/g'`
+      HIVE_AUX_JARS_PATH=`cygpath -p -w "$HIVE_AUX_JARS_PATH"`
+      HIVE_AUX_JARS_PATH=`echo $HIVE_AUX_JARS_PATH | sed 's/;/,/g'`
+  fi
+  AUX_CLASSPATH=${HIVE_AUX_JARS_PATH}
+  AUX_PARAM=file://${HIVE_AUX_JARS_PATH}
+  AUX_PARAM=`echo $AUX_PARAM | sed 's/,/,file:\/\//g'`
+fi
+
+# adding jars from auxlib directory
+for f in ${HIVE_HOME}/auxlib/*.jar; do
+  if [[ ! -f $f ]]; then
+      continue;
+  fi
+  if $cygwin; then
+      f=`cygpath -w "$f"`
+  fi
+  AUX_CLASSPATH=${AUX_CLASSPATH}:$f
+  if [ "${AUX_PARAM}" == "" ]; then
+    AUX_PARAM=file://$f
+  else
+    AUX_PARAM=${AUX_PARAM},file://$f;
+  fi
+done
+if $cygwin; then
+    CLASSPATH=`cygpath -p -w "$CLASSPATH"`
+    CLASSPATH=${CLASSPATH};${AUX_CLASSPATH}
+else
+    CLASSPATH=${CLASSPATH}:${AUX_CLASSPATH}
+fi
+
+# pass classpath to hadoop
+export HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${CLASSPATH}"
+
+# check for hadoop in the path
+HADOOP_IN_PATH=`which hadoop 2>/dev/null`
+if [ -f ${HADOOP_IN_PATH} ]; then
+  HADOOP_DIR=`dirname "$HADOOP_IN_PATH"`/..
+fi
+# HADOOP_HOME env variable overrides hadoop in the path
+HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DIR}
+if [ "$HADOOP_HOME" == "" ]; then
+  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  exit 4;
+fi
+
+HADOOP=$HADOOP_HOME/bin/hadoop
+if [ ! -f ${HADOOP} ]; then
+  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  exit 4;
+fi
+
+# Make sure we're using a compatible version of Hadoop
+hadoop_version=$($HADOOP version | awk '{if (NR == 1) {print $2;}}');
+
+# Save the regex to a var to work around quoting incompatibilities
+# between Bash 3.1 and 3.2
+hadoop_version_re="^([[:digit:]]+)\.([[:digit:]]+)(\.([[:digit:]]+))?.*$"
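+# e.g. "0.20.2" yields major=0, minor=20, patch=2 via BASH_REMATCH below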
+
+if [[ "$hadoop_version" =~ $hadoop_version_re ]]; then
+    hadoop_major_ver=${BASH_REMATCH[1]}
+    hadoop_minor_ver=${BASH_REMATCH[2]}
+    hadoop_patch_ver=${BASH_REMATCH[4]}
+else
+    echo "Unable to determine Hadoop version information."
+    echo "'hadoop version' returned:"
+    echo `$HADOOP version`
+    exit 5
+fi
+
+if [ $hadoop_minor_ver -ne 20 -o $hadoop_patch_ver -eq 0 ]; then
+    echo "Hive requires Hadoop 0.20.x (x >= 1)."
+    echo "'hadoop version' returned:"
+    echo `$HADOOP version`
+    exit 6
+fi
+
+if [ "${AUX_PARAM}" != "" ]; then
+  HIVE_OPTS="$HIVE_OPTS -hiveconf hive.aux.jars.path=${AUX_PARAM}"
+  AUX_JARS_CMD_LINE="-libjars ${AUX_PARAM}"
+fi
+
+SERVICE_LIST=""
+
+for i in "$bin"/ext/*.sh ; do
+  . $i
+done
+
+for i in "$bin"/ext/util/*.sh ; do
+  . $i
+done
+
+TORUN=""
+for j in $SERVICE_LIST ; do
+  if [ "$j" = "$SERVICE" ] ; then
+    TORUN=${j}$HELP
+  fi
+done
+
+if [ "$TORUN" = "" ] ; then
+  echo "Service $SERVICE not found"
+  echo "Available Services: $SERVICE_LIST"
+  exit 7
+else
+  $TORUN "$@"
+fi
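+
+# For illustration: "./hive --service metastore" resolves TORUN to "metastore",
+# while "./hive --service metastore --help" resolves it to "metastore_help".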
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/hive-config.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/hive-config.sh
new file mode 100755
index 0000000..2524bbc
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/hive-config.sh
@@ -0,0 +1,68 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# processes --config option from command line
+#
+
+this="$0"
+while [ -h "$this" ]; do
+  ls=`ls -ld "$this"`
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '.*/.*' > /dev/null; then
+    this="$link"
+  else
+    this=`dirname "$this"`/"$link"
+  fi
+done
+
+# convert relative path to absolute path
+bin=`dirname "$this"`
+script=`basename "$this"`
+bin=`cd "$bin"; pwd`
+this="$bin/$script"
+
+# the root of the Hadoop installation
+export HIVE_HOME=`dirname "$bin"`
+
+#check to see if the conf dir is given as an optional argument
+while [ $# -gt 0 ]; do    # Until you run out of parameters . . .
+  case "$1" in
+    --config)
+        shift
+        confdir=$1
+        shift
+        HIVE_CONF_DIR=$confdir
+        ;;
+    --auxpath)
+        shift
+        HIVE_AUX_JARS_PATH=$1
+        shift
+        ;;
+    *)
+        break;
+        ;;
+  esac
+done
+
+
+# Allow alternate conf dir location.
+HIVE_CONF_DIR="${HIVE_CONF_DIR:-$HIVE_HOME/conf}"
+
+export HIVE_CONF_DIR=$HIVE_CONF_DIR
+export HIVE_AUX_JARS_PATH=$HIVE_AUX_JARS_PATH
+
+# Default to use 256MB 
+export HADOOP_HEAPSIZE=${HADOOP_HEAPSIZE:-256}
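+
+# For illustration: ". bin/hive-config.sh --config /path/to/conf --auxpath /path/to/serde.jar"
+# leaves HIVE_CONF_DIR and HIVE_AUX_JARS_PATH exported for the calling script.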
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/init-hive-dfs.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/init-hive-dfs.sh
new file mode 100755
index 0000000..ec3997a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/init-hive-dfs.sh
@@ -0,0 +1,107 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The purpose of this script is to set up the Hive warehouse directories on HDFS
+
+DEFAULT_WAREHOUSE_DIR="/user/hive/warehouse"
+DEFAULT_TMP_DIR="/tmp"
+
+WAREHOUSE_DIR=${DEFAULT_WAREHOUSE_DIR}
+TMP_DIR=${DEFAULT_TMP_DIR}
+HELP=""
+while [ $# -gt 0 ]; do
+  case "$1" in
+    --warehouse-dir)
+      shift
+      WAREHOUSE_DIR=$1
+      shift
+      ;;
+    --tmp-dir)
+      shift
+      TMP_DIR=$1
+      shift
+      ;;
+    --help)
+      HELP=_help
+      shift
+      ;;
+    *)
+      echo "Invalid parameter: $1"
+      HELP=_help
+      break
+      ;;
+  esac
+done
+
+if [ "$HELP" = "_help" ] ; then
+  echo "Usage $0 [--warehouse-dir <Hive user>] [--tmp-dir <Tmp dir>]"
+  echo "Default value of warehouse directory is: [$DEFAULT_WAREHOUSE_DIR]"
+  echo "Default value of the temporary directory is: [$DEFAULT_TMP_DIR]"
+  exit -1
+fi
+
+
+# check for hadoop in the path
+HADOOP_IN_PATH=`which hadoop 2>/dev/null`
+if [ -f ${HADOOP_IN_PATH} ]; then
+  HADOOP_DIR=`dirname "$HADOOP_IN_PATH"`/..
+fi
+# HADOOP_HOME env variable overrides hadoop in the path
+HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DIR}
+if [ "$HADOOP_HOME" == "" ]; then
+  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  exit 4;
+fi
+
+HADOOP_EXEC=$HADOOP_HOME/bin/hadoop
+if [ ! -f ${HADOOP_EXEC} ]; then
+  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  exit 4;
+fi
+
+
+# Ensure the tmp directory exists
+$HADOOP_EXEC fs -test -d ${TMP_DIR} > /dev/null 2>&1
+if [ $? -ne 0 ] 
+then
+  echo "Creating directory [${TMP_DIR}]"
+  $HADOOP_EXEC fs -mkdir ${TMP_DIR}
+fi
+
+echo "Setting writeable group rights for directory [${TMP_DIR}]"
+$HADOOP_EXEC fs -chmod g+w ${TMP_DIR}
+
+
+# Ensure the warehouse directory exists
+$HADOOP_EXEC fs -test -d ${WAREHOUSE_DIR} > /dev/null 2>&1
+if [ $? -ne 0 ] 
+then
+  echo "Creating directory [${WAREHOUSE_DIR}]"
+  $HADOOP_EXEC fs -mkdir ${WAREHOUSE_DIR}
+fi
+
+echo "Setting writeable group rights for directory [${WAREHOUSE_DIR}]"
+$HADOOP_EXEC fs -chmod g+w ${WAREHOUSE_DIR}
+
+echo "Initialization done."
+echo
+echo "Please, do not forget to set the following configuration properties in hive-site.xml:"
+echo "hive.metastore.warehouse.dir=${WAREHOUSE_DIR}"
+echo "hive.exec.scratchdir=${TMP_DIR}"
+
+exit 0
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/startAllNCs.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/startAllNCs.sh
new file mode 100644
index 0000000..d30da26
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/startAllNCs.sh
@@ -0,0 +1,6 @@
+PREGELIX_PATH=`pwd`
+
+for i in `cat conf/slaves`
+do
+   ssh $i "cd ${PREGELIX_PATH}; export JAVA_HOME=${JAVA_HOME}; bin/startnc.sh"
+done
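+
+# Note: this assumes passwordless ssh from this host to every entry in conf/slaves.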
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/startCluster.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/startCluster.sh
new file mode 100644
index 0000000..6aa9161
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/startCluster.sh
@@ -0,0 +1,19 @@
+bin/startcc.sh
+sleep 5
+bin/startAllNCs.sh
+
+. conf/cluster.properties
+# do we need to specify the version somewhere?
+hyrackcmd=`ls ${HYRACKS_HOME}/hyracks-cli/target/hyracks-cli-*-binary-assembly/bin/hyrackscli`
+# find zip file
+appzip=`ls $PWD/../hivesterix-dist-*-binary-assembly.zip`
+
+[ -f $hyrackcmd ] || { echo "Hyracks commandline is missing"; exit -1;}
+[ -f $appzip ] || { echo "Hivesterix binary-assembly.zip is missing"; exit -1;}
+
+CCHOST_NAME=`cat conf/master`
+
+IPADDR=`bin/getip.sh`
+echo "connect to \"${IPADDR}:${CC_CLIENTPORT}\"; create application hivesterix \"$appzip\";" | $hyrackcmd 
+echo ""
+
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/startDebugNc.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/startDebugNc.sh
new file mode 100755
index 0000000..fe6cf27
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/startDebugNc.sh
@@ -0,0 +1,50 @@
+hostname
+
+#Get the IP address of the cc
+CCHOST_NAME=`cat conf/master`
+CURRENT_PATH=`pwd`
+CCHOST=`ssh ${CCHOST_NAME} "cd ${CURRENT_PATH}; bin/getip.sh"`
+
+#Import cluster properties
+. conf/cluster.properties
+. conf/debugnc.properties
+
+#Clean up temp dir
+
+rm -rf $NCTMP_DIR2
+mkdir $NCTMP_DIR2
+
+#Clean up log dir
+rm -rf $NCLOGS_DIR2
+mkdir $NCLOGS_DIR2
+
+
+#Clean up I/O working dir
+io_dirs=$(echo $IO_DIRS2 | tr "," "\n")
+for io_dir in $io_dirs
+do
+	rm -rf $io_dir
+	mkdir $io_dir
+done
+
+#Set JAVA_HOME
+export JAVA_HOME=$JAVA_HOME
+
+#Get OS
+IPADDR=`bin/getip.sh`
+
+#Get node ID
+NODEID=`hostname | cut -d '.' -f 1`
+NODEID=${NODEID}2
+
+#Set JAVA_OPTS
+export JAVA_OPTS=$NCJAVA_OPTS2
+
+cd $HYRACKS_HOME
+HYRACKS_HOME=`pwd`
+
+#Enter the temp dir
+cd $NCTMP_DIR2
+
+#Launch hyracks nc
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR  -data-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS2}" &> $NCLOGS_DIR2/$NODEID.log &
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/startcc.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/startcc.sh
new file mode 100644
index 0000000..484ecac
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/startcc.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+hostname
+
+#Import cluster properties
+. conf/cluster.properties
+
+#Get the IP address of the cc
+CCHOST_NAME=`cat conf/master`
+CCHOST=`bin/getip.sh`
+
+#Remove the temp dir
+rm -rf $CCTMP_DIR
+mkdir $CCTMP_DIR
+
+#Remove the logs dir
+rm -rf $CCLOGS_DIR
+mkdir $CCLOGS_DIR
+
+#Export JAVA_HOME and JAVA_OPTS
+export JAVA_HOME=$JAVA_HOME
+export JAVA_OPTS=$CCJAVA_OPTS
+
+#Launch hyracks cc script
+chmod -R 755 $HYRACKS_HOME
+if [ -f "conf/topology.xml"  ]; then
+#Launch hyracks cc script with topology
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 -cluster-topology "conf/topology.xml" &> $CCLOGS_DIR/cc.log &
+else
+#Launch hyracks cc script without topology
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 &> $CCLOGS_DIR/cc.log &
+fi
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/startnc.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/startnc.sh
new file mode 100644
index 0000000..23a4c36
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/startnc.sh
@@ -0,0 +1,49 @@
+hostname
+
+MY_NAME=`hostname`
+#Get the IP address of the cc
+CCHOST_NAME=`cat conf/master`
+CURRENT_PATH=`pwd`
+CCHOST=`ssh ${CCHOST_NAME} "cd ${CURRENT_PATH}; bin/getip.sh"`
+
+#Import cluster properties
+. conf/cluster.properties
+
+#Clean up temp dir
+
+rm -rf $NCTMP_DIR
+mkdir $NCTMP_DIR
+
+#Clean up log dir
+rm -rf $NCLOGS_DIR
+mkdir $NCLOGS_DIR
+
+
+#Clean up I/O working dir
+io_dirs=$(echo $IO_DIRS | tr "," "\n")
+for io_dir in $io_dirs
+do
+	rm -rf $io_dir
+	mkdir $io_dir
+done
+
+#Set JAVA_HOME
+export JAVA_HOME=$JAVA_HOME
+
+IPADDR=`bin/getip.sh`
+#echo $IPADDR
+
+#Get node ID
+NODEID=`hostname | cut -d '.' -f 1`
+
+#Set JAVA_OPTS
+export JAVA_OPTS=$NCJAVA_OPTS
+
+cd $HYRACKS_HOME
+HYRACKS_HOME=`pwd`
+
+#Enter the temp dir
+cd $NCTMP_DIR
+
+#Launch hyracks nc
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR  -data-ip-address $IPADDR -result-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS}" &> $NCLOGS_DIR/$NODEID.log &
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/stopAllNCs.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopAllNCs.sh
new file mode 100644
index 0000000..12367c1
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopAllNCs.sh
@@ -0,0 +1,6 @@
+PREGELIX_PATH=`pwd`
+
+for i in `cat conf/slaves`
+do
+   ssh $i "cd ${PREGELIX_PATH}; bin/stopnc.sh"
+done
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/stopCluster.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopCluster.sh
new file mode 100644
index 0000000..4889934
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopCluster.sh
@@ -0,0 +1,3 @@
+bin/stopAllNCs.sh
+sleep 2
+bin/stopcc.sh
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/stopcc.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopcc.sh
new file mode 100644
index 0000000..c2f525a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopcc.sh
@@ -0,0 +1,10 @@
+hostname
+. conf/cluster.properties
+
+#Kill process
+PID=`ps -ef|grep ${USER}|grep java|grep hyracks|awk '{print $2}'`
+echo $PID
+kill -9 $PID
+
+#Clean up CC temp dir
+rm -rf $CCTMP_DIR/*
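+
+# Note: the grep chain above matches any java process of ${USER} whose command
+# line mentions "hyracks", so a single CC process per user is assumed.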
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/stopnc.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopnc.sh
new file mode 100644
index 0000000..35c4794
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopnc.sh
@@ -0,0 +1,27 @@
+hostname
+. conf/cluster.properties
+
+#Kill process
+PID=`ps -ef|grep ${USER}|grep java|grep 'Dapp.name=hyracksnc'|awk '{print $2}'`
+
+if [ "$PID" == "" ]; then
+  PID=`ps -ef|grep ${USER}|grep java|grep 'hyracks'|awk '{print $2}'`
+fi
+
+if [ "$PID" == "" ]; then
+  USERID=`id | sed 's/^uid=//;s/(.*$//'`
+  PID=`ps -ef|grep ${USERID}|grep java|grep 'Dapp.name=hyracksnc'|awk '{print $2}'`
+fi
+
+echo $PID
+kill -9 $PID
+
+#Clean up I/O working dir
+io_dirs=$(echo $IO_DIRS | tr "," "\n")
+for io_dir in $io_dirs
+do
+	rm -rf $io_dir/*
+done
+
+#Clean up NC temp dir
+rm -rf $NCTMP_DIR/*
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestCase.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestCase.java
new file mode 100644
index 0000000..aa38fe7
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestCase.java
@@ -0,0 +1,142 @@
+package edu.uci.ics.hivesterix.perf;
+
+import java.io.File;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.junit.Test;
+
+import edu.uci.ics.hivesterix.common.config.ConfUtil;
+import edu.uci.ics.hivesterix.perf.base.AbstractPerfTestCase;
+
+public class PerfTestCase extends AbstractPerfTestCase {
+    private File resultFile;
+    private FileSystem dfs;
+
+    PerfTestCase(File queryFile, File resultFile) {
+        super("testRuntimeFunction", queryFile);
+        this.queryFile = queryFile;
+        this.resultFile = resultFile;
+    }
+
+    @Test
+    public void testRuntimeFunction() throws Exception {
+        StringBuilder queryString = new StringBuilder();
+        readFileToString(queryFile, queryString);
+        String[] queries = queryString.toString().split(";");
+        StringWriter sw = new StringWriter();
+
+        HiveConf hconf = ConfUtil.getHiveConf();
+        Driver driver = new Driver(hconf, new PrintWriter(sw));
+        driver.init();
+
+        dfs = FileSystem.get(ConfUtil.getJobConf());
+
+        int i = 0;
+        for (String query : queries) {
+            if (i == queries.length - 1)
+                break;
+            driver.run(query);
+            driver.clear();
+            i++;
+        }
+
+        String warehouse = hconf.get("hive.metastore.warehouse.dir");
+        String tableName = removeExt(resultFile.getName());
+        String directory = warehouse + "/" + tableName + "/";
+        String localDirectory = "tmp";
+
+        FileStatus[] files = dfs.listStatus(new Path(directory));
+        FileSystem lfs = null;
+        if (files == null) {
+            lfs = FileSystem.getLocal(ConfUtil.getJobConf());
+            files = lfs.listStatus(new Path(directory));
+        }
+
+        File resultDirectory = new File(localDirectory + "/" + tableName);
+        deleteDir(resultDirectory);
+        resultDirectory.mkdir();
+
+        for (FileStatus fs : files) {
+            Path src = fs.getPath();
+            if (src.getName().indexOf("crc") >= 0)
+                continue;
+
+            String destStr = localDirectory + "/" + tableName + "/" + src.getName();
+            Path dest = new Path(destStr);
+            if (lfs != null) {
+                lfs.copyToLocalFile(src, dest);
+                dfs.copyFromLocalFile(dest, new Path(directory));
+            } else
+                dfs.copyToLocalFile(src, dest);
+        }
+
+        File[] rFiles = resultDirectory.listFiles();
+        StringBuilder sb = new StringBuilder();
+        for (File r : rFiles) {
+            if (r.getName().indexOf("crc") >= 0)
+                continue;
+            readFileToString(r, sb);
+        }
+        deleteDir(resultDirectory);
+
+        StringBuilder buf = new StringBuilder();
+        readFileToString(resultFile, buf);
+        if (!equal(buf, sb)) {
+            throw new Exception("Result for " + queryFile + " changed:\n" + sw.toString());
+        }
+    }
+
+    private void deleteDir(File resultDirectory) {
+        if (resultDirectory.exists()) {
+            File[] rFiles = resultDirectory.listFiles();
+            for (File r : rFiles)
+                r.delete();
+            resultDirectory.delete();
+        }
+    }
+
+    private boolean equal(StringBuilder sb1, StringBuilder sb2) {
+        String s1 = sb1.toString();
+        String s2 = sb2.toString();
+        String[] rowsOne = s1.split("\n");
+        String[] rowsTwo = s2.split("\n");
+
+        if (rowsOne.length != rowsTwo.length)
+            return false;
+
+        for (int i = 0; i < rowsOne.length; i++) {
+            String row1 = rowsOne[i];
+            String row2 = rowsTwo[i];
+
+            if (row1.equals(row2))
+                continue;
+
+            String[] fields1 = row1.split("");
+            String[] fields2 = row2.split("");
+
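+            // Fields without a decimal point must match exactly; fields containing
+            // one are parsed and compared as floats.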
+            for (int j = 0; j < fields1.length; j++) {
+                if (fields1[j].equals(fields2[j])) {
+                    continue;
+                } else if (fields1[j].indexOf('.') < 0) {
+                    return false;
+                } else {
+                    Float float1 = Float.parseFloat(fields1[j]);
+                    Float float2 = Float.parseFloat(fields2[j]);
+
+                    if (Math.abs(float1 - float2) == 0)
+                        continue;
+                    else
+                        return false;
+                }
+            }
+        }
+
+        return true;
+    }
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuite.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuite.java
new file mode 100644
index 0000000..796842d
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuite.java
@@ -0,0 +1,74 @@
+package edu.uci.ics.hivesterix.perf;
+
+import java.io.File;
+import java.util.List;
+
+import junit.framework.Test;
+import junit.framework.TestResult;
+import edu.uci.ics.hivesterix.perf.base.AbstractPerfTestSuiteClass;
+
+public class PerfTestSuite extends AbstractPerfTestSuiteClass {
+
+    private static final String PATH_TO_QUERIES = "src/test/resources/perf/queries/";
+    private static final String PATH_TO_RESULTS = "src/test/resources/perf/results/";
+    private static final String PATH_TO_IGNORES = "src/test/resources/perf/ignore.txt";
+
+    private static final String FILE_EXTENSION_OF_RESULTS = "result";
+
+    public static Test suite() throws Exception {
+        List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
+        File testData = new File(PATH_TO_QUERIES);
+        File[] queries = testData.listFiles();
+        PerfTestSuite testSuite = new PerfTestSuite();
+
+        // set hdfs and hyracks cluster, and load test data to hdfs
+        try {
+            testSuite.setup();
+            testSuite.loadData();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+
+        for (File qFile : queries) {
+            if (isIgnored(qFile.getName(), ignores))
+                continue;
+
+            if (qFile.isFile()) {
+                String resultFileName = hiveExtToResExt(qFile.getName());
+                File rFile = new File(PATH_TO_RESULTS + resultFileName);
+                testSuite.addTest(new PerfTestCase(qFile, rFile));
+            }
+        }
+        return testSuite;
+    }
+
+    private static String hiveExtToResExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+    }
+
+    /**
+     * Runs the tests and collects their result in a TestResult.
+     */
+    @Override
+    public void run(TestResult result) {
+
+        int testCount = countTestCases();
+        for (int i = 0; i < testCount; i++) {
+            Test each = this.testAt(i);
+            if (result.shouldStop())
+                break;
+            runTest(each, result);
+        }
+
+        // cleanup hdfs and hyracks cluster
+        try {
+            cleanup();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+    }
+
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteCaseGenerator.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteCaseGenerator.java
new file mode 100644
index 0000000..4777351
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteCaseGenerator.java
@@ -0,0 +1,99 @@
+package edu.uci.ics.hivesterix.perf;
+
+import java.io.File;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.junit.Test;
+
+import edu.uci.ics.hivesterix.common.config.ConfUtil;
+import edu.uci.ics.hivesterix.perf.base.AbstractPerfTestCase;
+
+public class PerfTestSuiteCaseGenerator extends AbstractPerfTestCase {
+    private File resultFile;
+    private FileSystem dfs;
+
+    PerfTestSuiteCaseGenerator(File queryFile, File resultFile) {
+        super("testRuntimeFunction", queryFile);
+        this.queryFile = queryFile;
+        this.resultFile = resultFile;
+    }
+
+    @Test
+    public void testRuntimeFunction() throws Exception {
+        StringBuilder queryString = new StringBuilder();
+        readFileToString(queryFile, queryString);
+        String[] queries = queryString.toString().split(";");
+
+        HiveConf hconf = ConfUtil.getHiveConf();
+        Driver driver = new Driver(hconf);
+        driver.init();
+
+        dfs = FileSystem.get(ConfUtil.getJobConf());
+
+        long startTime = System.currentTimeMillis();
+        int i = 0;
+        for (String query : queries) {
+            if (i == queries.length - 1)
+                break;
+            driver.run(query);
+            // driver.clear();
+            i++;
+        }
+        long endTime = System.currentTimeMillis();
+        System.out.println(resultFile.getName() + " execution time " + (endTime - startTime) + " ms");
+
+        String warehouse = hconf.get("hive.metastore.warehouse.dir");
+        String tableName = removeExt(resultFile.getName());
+        String directory = warehouse + "/" + tableName + "/";
+        String localDirectory = "tmp";
+
+        FileStatus[] files = dfs.listStatus(new Path(directory));
+        FileSystem lfs = null;
+        if (files == null) {
+            lfs = FileSystem.getLocal(ConfUtil.getJobConf());
+            files = lfs.listStatus(new Path(directory));
+        }
+
+        File resultDirectory = new File(localDirectory + "/" + tableName);
+        deleteDir(resultDirectory);
+        resultDirectory.mkdir();
+
+        for (FileStatus fs : files) {
+            Path src = fs.getPath();
+            if (src.getName().indexOf("crc") >= 0)
+                continue;
+
+            String destStr = localDirectory + "/" + tableName + "/" + src.getName();
+            Path dest = new Path(destStr);
+            if (lfs != null) {
+                lfs.copyToLocalFile(src, dest);
+                dfs.copyFromLocalFile(dest, new Path(directory));
+            } else
+                dfs.copyToLocalFile(src, dest);
+        }
+
+        File[] rFiles = resultDirectory.listFiles();
+        StringBuilder sb = new StringBuilder();
+        for (File r : rFiles) {
+            if (r.getName().indexOf("crc") >= 0)
+                continue;
+            readFileToString(r, sb);
+        }
+        deleteDir(resultDirectory);
+
+        writeStringToFile(resultFile, sb);
+    }
+
+    private void deleteDir(File resultDirectory) {
+        if (resultDirectory.exists()) {
+            File[] rFiles = resultDirectory.listFiles();
+            for (File r : rFiles)
+                r.delete();
+            resultDirectory.delete();
+        }
+    }
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteGenerator.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteGenerator.java
new file mode 100644
index 0000000..aa38014
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/PerfTestSuiteGenerator.java
@@ -0,0 +1,74 @@
+package edu.uci.ics.hivesterix.perf;
+
+import java.io.File;
+import java.util.List;
+
+import junit.framework.Test;
+import junit.framework.TestResult;
+import edu.uci.ics.hivesterix.perf.base.AbstractPerfTestSuiteClass;
+
+public class PerfTestSuiteGenerator extends AbstractPerfTestSuiteClass {
+
+    private static final String PATH_TO_QUERIES = "src/test/resources/perf/queries/";
+    private static final String PATH_TO_RESULTS = "src/test/resources/perf/results/";
+    private static final String PATH_TO_IGNORES = "src/test/resources/perf/ignore.txt";
+
+    private static final String FILE_EXTENSION_OF_RESULTS = "result";
+
+    public static Test suite() throws Exception {
+        List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
+        File testData = new File(PATH_TO_QUERIES);
+        File[] queries = testData.listFiles();
+        PerfTestSuiteGenerator testSuite = new PerfTestSuiteGenerator();
+
+        // set hdfs and hyracks cluster, and load test data to hdfs
+        try {
+            testSuite.setup();
+            testSuite.loadData();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+
+        for (File qFile : queries) {
+            if (isIgnored(qFile.getName(), ignores))
+                continue;
+
+            if (qFile.isFile() && qFile.getName().startsWith("q18_")) {
+                String resultFileName = hiveExtToResExt(qFile.getName());
+                File rFile = new File(PATH_TO_RESULTS + resultFileName);
+                testSuite.addTest(new PerfTestSuiteCaseGenerator(qFile, rFile));
+            }
+        }
+        return testSuite;
+    }
+
+    private static String hiveExtToResExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+    }
+
+    /**
+     * Runs the tests and collects their result in a TestResult.
+     */
+    @Override
+    public void run(TestResult result) {
+
+        int testCount = countTestCases();
+        for (int i = 0; i < testCount; i++) {
+            Test each = this.testAt(i);
+            if (result.shouldStop())
+                break;
+            runTest(each, result);
+        }
+
+        // cleanup hdfs and hyracks cluster
+        try {
+            cleanup();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+    }
+
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestCase.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestCase.java
new file mode 100644
index 0000000..7e7db36
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestCase.java
@@ -0,0 +1,49 @@
+package edu.uci.ics.hivesterix.perf.base;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+import junit.framework.TestCase;
+
+public class AbstractPerfTestCase extends TestCase {
+    protected File queryFile;
+
+    public AbstractPerfTestCase(String testName, File queryFile) {
+        super(testName);
+    }
+
+    protected static void readFileToString(File file, StringBuilder buf) throws Exception {
+        BufferedReader result = new BufferedReader(new FileReader(file));
+        while (true) {
+            String s = result.readLine();
+            if (s == null) {
+                break;
+            } else {
+                buf.append(s);
+                buf.append('\n');
+            }
+        }
+        result.close();
+    }
+
+    protected static void writeStringToFile(File file, StringWriter buf) throws Exception {
+        PrintWriter result = new PrintWriter(new FileWriter(file));
+        result.print(buf);
+        result.close();
+    }
+
+    protected static void writeStringToFile(File file, StringBuilder buf) throws Exception {
+        PrintWriter result = new PrintWriter(new FileWriter(file));
+        result.print(buf);
+        result.close();
+    }
+
+    protected static String removeExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot);
+    }
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestSuiteClass.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestSuiteClass.java
new file mode 100644
index 0000000..c882742
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/perf/base/AbstractPerfTestSuiteClass.java
@@ -0,0 +1,207 @@
+package edu.uci.ics.hivesterix.perf.base;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import junit.framework.TestSuite;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MiniMRCluster;
+
+import edu.uci.ics.hivesterix.common.config.ConfUtil;
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
+import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
+import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+
+@SuppressWarnings("deprecation")
+public abstract class AbstractPerfTestSuiteClass extends TestSuite {
+
+    private static final String PATH_TO_HADOOP_CONF = "src/test/resources/perf/hadoop/conf";
+    private static final String PATH_TO_HIVE_CONF = "src/test/resources/perf/hive/conf/hive-default.xml";
+    private static final String PATH_TO_DATA = "src/test/resources/perf/data/";
+
+    private MiniDFSCluster dfsCluster;
+    private MiniMRCluster mrCluster;
+
+    private JobConf conf = new JobConf();
+    protected FileSystem dfs;
+
+    private int numberOfNC = 2;
+    private ClusterControllerService cc;
+    private Map<String, NodeControllerService> ncs = new HashMap<String, NodeControllerService>();
+
+    /**
+     * setup cluster
+     * 
+     * @throws IOException
+     */
+    protected void setup() throws Exception {
+        setupHdfs();
+        setupHyracks();
+    }
+
+    private void setupHdfs() throws IOException {
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
+        HiveConf hconf = new HiveConf(SessionState.class);
+        hconf.addResource(new Path(PATH_TO_HIVE_CONF));
+
+        FileSystem lfs = FileSystem.getLocal(new Configuration());
+        lfs.delete(new Path("build"), true);
+        lfs.delete(new Path("metastore_db"), true);
+
+        System.setProperty("hadoop.log.dir", "logs");
+        dfsCluster = new MiniDFSCluster(hconf, numberOfNC, true, null);
+        dfs = dfsCluster.getFileSystem();
+
+        mrCluster = new MiniMRCluster(2, dfs.getUri().toString(), 1);
+        hconf.setVar(HiveConf.ConfVars.HADOOPJT, "localhost:" + mrCluster.getJobTrackerPort());
+        hconf.setInt("mapred.min.split.size", 1342177280);
+
+        conf = new JobConf(hconf);
+        ConfUtil.setJobConf(conf);
+
+        String fsName = conf.get("fs.default.name");
+        hconf.set("hive.metastore.warehouse.dir", fsName.concat("/tmp/hivesterix"));
+        String warehouse = hconf.get("hive.metastore.warehouse.dir");
+        dfs.mkdirs(new Path(warehouse));
+        ConfUtil.setHiveConf(hconf);
+    }
+
+    private void setupHyracks() throws Exception {
+        // read hive conf
+        HiveConf hconf = new HiveConf(SessionState.class);
+        hconf.addResource(new Path(PATH_TO_HIVE_CONF));
+        SessionState.start(hconf);
+        String ipAddress = hconf.get("hive.hyracks.host");
+        int clientPort = Integer.parseInt(hconf.get("hive.hyracks.port"));
+        int clusterPort = clientPort;
+        String applicationName = hconf.get("hive.hyracks.app");
+
+        // start hyracks cc
+        CCConfig ccConfig = new CCConfig();
+        ccConfig.clientNetIpAddress = ipAddress;
+        ccConfig.clientNetPort = clientPort;
+        ccConfig.clusterNetPort = clusterPort;
+        ccConfig.profileDumpPeriod = 1000;
+        ccConfig.heartbeatPeriod = 200000000;
+        ccConfig.maxHeartbeatLapsePeriods = 200000000;
+        cc = new ClusterControllerService(ccConfig);
+        cc.start();
+
+        // start hyracks nc
+        for (int i = 0; i < numberOfNC; i++) {
+            NCConfig ncConfig = new NCConfig();
+            ncConfig.ccHost = ipAddress;
+            ncConfig.clusterNetIPAddress = ipAddress;
+            ncConfig.ccPort = clientPort;
+            ncConfig.dataIPAddress = "127.0.0.1";
+            ncConfig.datasetIPAddress = "127.0.0.1";
+            ncConfig.nodeId = "nc" + i;
+            NodeControllerService nc = new NodeControllerService(ncConfig);
+            nc.start();
+            ncs.put(ncConfig.nodeId, nc);
+        }
+
+        IHyracksClientConnection hcc = new HyracksConnection(ccConfig.clientNetIpAddress, clientPort);
+        hcc.createApplication(applicationName, null);
+    }
+
+    protected void makeDir(String path) throws IOException {
+        dfs.mkdirs(new Path(path));
+    }
+
+    protected void loadFiles(String src, String dest) throws IOException {
+        dfs.copyFromLocalFile(new Path(src), new Path(dest));
+    }
+
+    protected void cleanup() throws Exception {
+        cleanupHdfs();
+        cleanupHyracks();
+    }
+
+    /**
+     * cleanup hdfs cluster
+     */
+    private void cleanupHdfs() throws IOException {
+        dfs.delete(new Path("/"), true);
+        FileSystem.closeAll();
+        dfsCluster.shutdown();
+    }
+
+    /**
+     * cleanup hyracks cluster
+     */
+    private void cleanupHyracks() throws Exception {
+        Iterator<NodeControllerService> iterator = ncs.values().iterator();
+        while (iterator.hasNext()) {
+            NodeControllerService nc = iterator.next();
+            nc.stop();
+        }
+        cc.stop();
+    }
+
+    protected static List<String> getIgnoreList(String ignorePath) throws FileNotFoundException, IOException {
+        BufferedReader reader = new BufferedReader(new FileReader(ignorePath));
+        String s = null;
+        List<String> ignores = new ArrayList<String>();
+        while ((s = reader.readLine()) != null) {
+            ignores.add(s);
+        }
+        reader.close();
+        return ignores;
+    }
+
+    protected static boolean isIgnored(String q, List<String> ignoreList) {
+        for (String ignore : ignoreList) {
+            if (ignore.equals(q)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    protected void loadData() throws IOException {
+
+        makeDir("/tpch");
+        makeDir("/tpch/customer");
+        makeDir("/tpch/lineitem");
+        makeDir("/tpch/orders");
+        makeDir("/tpch/part");
+        makeDir("/tpch/partsupp");
+        makeDir("/tpch/supplier");
+        makeDir("/tpch/nation");
+        makeDir("/tpch/region");
+
+        makeDir("/jarod");
+
+        loadFiles(PATH_TO_DATA + "customer.tbl", "/tpch/customer/");
+        loadFiles(PATH_TO_DATA + "lineitem.tbl", "/tpch/lineitem/");
+        loadFiles(PATH_TO_DATA + "orders.tbl", "/tpch/orders/");
+        loadFiles(PATH_TO_DATA + "part.tbl", "/tpch/part/");
+        loadFiles(PATH_TO_DATA + "partsupp.tbl", "/tpch/partsupp/");
+        loadFiles(PATH_TO_DATA + "supplier.tbl", "/tpch/supplier/");
+        loadFiles(PATH_TO_DATA + "nation.tbl", "/tpch/nation/");
+        loadFiles(PATH_TO_DATA + "region.tbl", "/tpch/region/");
+
+        loadFiles(PATH_TO_DATA + "ext-gby.tbl", "/jarod/");
+    }
+
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractHivesterixTestCase.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractHivesterixTestCase.java
new file mode 100644
index 0000000..ae5fa05
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractHivesterixTestCase.java
@@ -0,0 +1,50 @@
+package edu.uci.ics.hivesterix.test.base;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+import junit.framework.TestCase;
+
+public class AbstractHivesterixTestCase extends TestCase {
+    protected File queryFile;
+
+    public AbstractHivesterixTestCase(String testName, File queryFile) {
+        super(testName);
+        this.queryFile = queryFile;
+    }
+
+    protected static void readFileToString(File file, StringBuilder buf) throws Exception {
+        BufferedReader result = new BufferedReader(new FileReader(file));
+        while (true) {
+            String s = result.readLine();
+            if (s == null) {
+                break;
+            } else {
+                buf.append(s);
+                buf.append('\n');
+            }
+        }
+        result.close();
+    }
+
+    protected static void writeStringToFile(File file, StringWriter buf) throws Exception {
+        PrintWriter result = new PrintWriter(new FileWriter(file));
+        result.print(buf);
+        result.close();
+    }
+
+    protected static void writeStringToFile(File file, StringBuilder buf) throws Exception {
+        PrintWriter result = new PrintWriter(new FileWriter(file));
+        result.print(buf);
+        result.close();
+    }
+
+    protected static String removeExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot);
+    }
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractTestSuiteClass.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractTestSuiteClass.java
new file mode 100644
index 0000000..09b632a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/base/AbstractTestSuiteClass.java
@@ -0,0 +1,234 @@
+package edu.uci.ics.hivesterix.test.base;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import junit.framework.TestSuite;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.mapred.JobConf;
+
+import edu.uci.ics.hivesterix.common.config.ConfUtil;
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
+import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
+import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+
+@SuppressWarnings("deprecation")
+public abstract class AbstractTestSuiteClass extends TestSuite {
+
+    private static final String PATH_TO_HADOOP_CONF = "src/test/resources/runtimefunctionts/hadoop/conf";
+    private static final String PATH_TO_HIVE_CONF = "src/test/resources/runtimefunctionts/hive/conf/hive-default.xml";
+
+    private static final String PATH_TO_CLUSTER_CONF = "src/test/resources/runtimefunctionts/hive/conf/topology.xml";
+    private static final String PATH_TO_DATA = "src/test/resources/runtimefunctionts/data/";
+
+    private static final String clusterPropertiesPath = "conf/cluster.properties";
+    private static final String masterFilePath = "conf/master";
+
+    private Properties clusterProps;
+    private MiniDFSCluster dfsCluster;
+
+    private JobConf conf = new JobConf();
+    protected FileSystem dfs;
+
+    private int numberOfNC = 2;
+    private ClusterControllerService cc;
+    private Map<String, NodeControllerService> ncs = new HashMap<String, NodeControllerService>();
+
+    /**
+     * setup cluster
+     * 
+     * @throws Exception
+     */
+    protected void setup() throws Exception {
+        setupHdfs();
+        setupHyracks();
+    }
+
+    private void setupHdfs() throws IOException {
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
+        HiveConf hconf = new HiveConf(SessionState.class);
+        hconf.addResource(new Path(PATH_TO_HIVE_CONF));
+
+        FileSystem lfs = FileSystem.getLocal(new Configuration());
+        lfs.delete(new Path("build"), true);
+        lfs.delete(new Path("metastore_db"), true);
+
+        System.setProperty("hadoop.log.dir", "logs");
+        dfsCluster = new MiniDFSCluster(hconf, numberOfNC, true, null);
+        dfs = dfsCluster.getFileSystem();
+        conf = new JobConf(hconf);
+        ConfUtil.setJobConf(conf);
+
+        String fsName = conf.get("fs.default.name");
+        hconf.set("hive.metastore.warehouse.dir", fsName.concat("/tmp/hivesterix"));
+        String warehouse = hconf.get("hive.metastore.warehouse.dir");
+        dfs.mkdirs(new Path(warehouse));
+        ConfUtil.setHiveConf(hconf);
+    }
+
+    private void setupHyracks() throws Exception {
+        // read hive conf
+        HiveConf hconf = new HiveConf(SessionState.class);
+        hconf.addResource(new Path(PATH_TO_HIVE_CONF));
+        SessionState.start(hconf);
+        /**
+         * load the properties file if it is not loaded
+         */
+        if (clusterProps == null) {
+            clusterProps = new Properties();
+            InputStream confIn = new FileInputStream(clusterPropertiesPath);
+            clusterProps.load(confIn);
+            confIn.close();
+        }
+        BufferedReader ipReader = new BufferedReader(new InputStreamReader(new FileInputStream(masterFilePath)));
+        String masterNode = ipReader.readLine();
+        ipReader.close();
+        InetAddress[] ips = InetAddress.getAllByName(masterNode);
+        String ipAddress = null;
+        for (InetAddress ip : ips) {
+            if (ip.getAddress().length <= 4) {
+                ipAddress = ip.getHostAddress();
+            }
+        }
+        int clientPort = Integer.parseInt(clusterProps.getProperty("CC_CLIENTPORT"));
+        int netPort = Integer.parseInt(clusterProps.getProperty("CC_CLUSTERPORT"));
+        String applicationName = "hivesterix";
+
+        // start hyracks cc
+        CCConfig ccConfig = new CCConfig();
+        ccConfig.clientNetIpAddress = ipAddress;
+        ccConfig.clientNetPort = clientPort;
+        ccConfig.clusterNetPort = netPort;
+        ccConfig.profileDumpPeriod = 1000;
+        ccConfig.heartbeatPeriod = 200000000;
+        ccConfig.maxHeartbeatLapsePeriods = 200000000;
+        ccConfig.clusterTopologyDefinition = new File(PATH_TO_CLUSTER_CONF);
+        cc = new ClusterControllerService(ccConfig);
+        cc.start();
+
+        // start hyracks nc
+        for (int i = 0; i < numberOfNC; i++) {
+            NCConfig ncConfig = new NCConfig();
+            ncConfig.ccHost = ipAddress;
+            ncConfig.clusterNetIPAddress = ipAddress;
+            ncConfig.ccPort = netPort;
+            ncConfig.dataIPAddress = "127.0.0.1";
+            ncConfig.datasetIPAddress = "127.0.0.1";
+            ncConfig.nodeId = "nc" + i;
+            NodeControllerService nc = new NodeControllerService(ncConfig);
+            nc.start();
+            ncs.put(ncConfig.nodeId, nc);
+        }
+
+        IHyracksClientConnection hcc = new HyracksConnection(ccConfig.clientNetIpAddress, clientPort);
+        hcc.createApplication(applicationName, null);
+    }
+
+    protected void makeDir(String path) throws IOException {
+        dfs.mkdirs(new Path(path));
+    }
+
+    protected void loadFiles(String src, String dest) throws IOException {
+        dfs.copyFromLocalFile(new Path(src), new Path(dest));
+    }
+
+    protected void cleanup() throws Exception {
+        cleanupHdfs();
+        cleanupHyracks();
+    }
+
+    /**
+     * cleanup hdfs cluster
+     */
+    private void cleanupHdfs() throws IOException {
+        dfs.delete(new Path("/"), true);
+        FileSystem.closeAll();
+        dfsCluster.shutdown();
+    }
+
+    /**
+     * cleanup hyracks cluster
+     */
+    private void cleanupHyracks() throws Exception {
+        Iterator<NodeControllerService> iterator = ncs.values().iterator();
+        while (iterator.hasNext()) {
+            NodeControllerService nc = iterator.next();
+            nc.stop();
+        }
+        cc.stop();
+    }
+
+    protected static List<String> getIgnoreList(String ignorePath) throws FileNotFoundException, IOException {
+        BufferedReader reader = new BufferedReader(new FileReader(ignorePath));
+        String s = null;
+        List<String> ignores = new ArrayList<String>();
+        while ((s = reader.readLine()) != null) {
+            ignores.add(s);
+        }
+        reader.close();
+        return ignores;
+    }
+
+    protected static boolean isIgnored(String q, List<String> ignoreList) {
+        for (String ignore : ignoreList) {
+            if (q.indexOf(ignore) >= 0) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    protected void loadData() throws IOException {
+
+        makeDir("/tpch");
+        makeDir("/tpch/customer");
+        makeDir("/tpch/lineitem");
+        makeDir("/tpch/orders");
+        makeDir("/tpch/part");
+        makeDir("/tpch/partsupp");
+        makeDir("/tpch/supplier");
+        makeDir("/tpch/nation");
+        makeDir("/tpch/region");
+
+        makeDir("/test");
+        makeDir("/test/joinsrc1");
+        makeDir("/test/joinsrc2");
+
+        loadFiles(PATH_TO_DATA + "customer.tbl", "/tpch/customer/");
+        loadFiles(PATH_TO_DATA + "lineitem.tbl", "/tpch/lineitem/");
+        loadFiles(PATH_TO_DATA + "orders.tbl", "/tpch/orders/");
+        loadFiles(PATH_TO_DATA + "part.tbl", "/tpch/part/");
+        loadFiles(PATH_TO_DATA + "partsupp.tbl", "/tpch/partsupp/");
+        loadFiles(PATH_TO_DATA + "supplier.tbl", "/tpch/supplier/");
+        loadFiles(PATH_TO_DATA + "nation.tbl", "/tpch/nation/");
+        loadFiles(PATH_TO_DATA + "region.tbl", "/tpch/region/");
+
+        loadFiles(PATH_TO_DATA + "large_card_join_src.tbl", "/test/joinsrc1/");
+        loadFiles(PATH_TO_DATA + "large_card_join_src_small.tbl", "/test/joinsrc2/");
+    }
+
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/datagen/RecordBalance.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/datagen/RecordBalance.java
new file mode 100644
index 0000000..ac029b1
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/datagen/RecordBalance.java
@@ -0,0 +1,77 @@
+package edu.uci.ics.hivesterix.test.datagen;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.TextInputFormat;
+
+@SuppressWarnings("deprecation")
+public class RecordBalance {
+
+    private static String confPath = System.getenv("HADOOP_HOME");
+    private static Path[] inputPaths = { new Path("/tpch/100x/customer"), new Path("/tpch/100x/nation"),
+            new Path("/tpch/100x/region"), new Path("/tpch/100x/lineitem"), new Path("/tpch/100x/orders"),
+            new Path("/tpch/100x/part"), new Path("/tpch/100x/partsupp"), new Path("/tpch/100x/supplier") };
+
+    private static Path[] outputPaths = { new Path("/tpch/100/customer"), new Path("/tpch/100/nation"),
+            new Path("/tpch/100/region"), new Path("/tpch/100/lineitem"), new Path("/tpch/100/orders"),
+            new Path("/tpch/100/part"), new Path("/tpch/100/partsupp"), new Path("/tpch/100/supplier") };
+
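+    // Identity mapper: re-emits each input line keyed by its byte offset from TextInputFormat.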
+    public static class MapRecordOnly extends MapReduceBase implements Mapper<LongWritable, Text, LongWritable, Text> {
+
+        public void map(LongWritable id, Text inputValue, OutputCollector<LongWritable, Text> output, Reporter reporter)
+                throws IOException {
+            output.collect(id, inputValue);
+        }
+    }
+
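+    // The reducer drops the offset key, so records are simply redistributed across the configured reduce partitions.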
+    public static class ReduceRecordOnly extends MapReduceBase implements
+            Reducer<LongWritable, Text, NullWritable, Text> {
+
+        NullWritable key = NullWritable.get();
+
+        public void reduce(LongWritable inputKey, Iterator<Text> inputValue,
+                OutputCollector<NullWritable, Text> output, Reporter reporter) throws IOException {
+            while (inputValue.hasNext())
+                output.collect(key, inputValue.next());
+        }
+    }
+
+    public static void main(String[] args) throws IOException {
+
+        for (int i = 0; i < inputPaths.length; i++) {
+            JobConf job = new JobConf(RecordBalance.class);
+            job.addResource(new Path(confPath + "/core-site.xml"));
+            job.addResource(new Path(confPath + "/mapred-site.xml"));
+            job.addResource(new Path(confPath + "/hdfs-site.xml"));
+
+            job.setJobName(RecordBalance.class.getSimpleName());
+            job.setMapperClass(MapRecordOnly.class);
+            job.setReducerClass(ReduceRecordOnly.class);
+            job.setMapOutputKeyClass(LongWritable.class);
+            job.setMapOutputValueClass(Text.class);
+
+            job.setInputFormat(TextInputFormat.class);
+            FileInputFormat.setInputPaths(job, inputPaths[i]);
+            FileOutputFormat.setOutputPath(job, outputPaths[i]);
+            job.setNumReduceTasks(Integer.parseInt(args[0]));
+
+            JobClient.runJob(job);
+        }
+    }
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/legacy/LegacyTestCase.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/legacy/LegacyTestCase.java
new file mode 100644
index 0000000..32c1c3f
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/legacy/LegacyTestCase.java
@@ -0,0 +1,143 @@
+package edu.uci.ics.hivesterix.test.legacy;
+
+import java.io.File;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.junit.Test;
+
+import edu.uci.ics.hivesterix.common.config.ConfUtil;
+import edu.uci.ics.hivesterix.test.base.AbstractHivesterixTestCase;
+
+public class LegacyTestCase extends AbstractHivesterixTestCase {
+    private File resultFile;
+    private FileSystem dfs;
+
+    public LegacyTestCase(File queryFile, File resultFile) {
+        super("legacy", queryFile);
+        this.queryFile = queryFile;
+        this.resultFile = resultFile;
+    }
+
+    @Test
+    public void testRuntimeFunction() throws Exception {
+        StringBuilder queryString = new StringBuilder();
+        readFileToString(queryFile, queryString);
+        String[] queries = queryString.toString().split(";");
+        StringWriter sw = new StringWriter();
+
+        HiveConf hconf = ConfUtil.getHiveConf();
+        Driver driver = new Driver(hconf, new PrintWriter(sw));
+        driver.init();
+
+        dfs = FileSystem.get(ConfUtil.getJobConf());
+
+        int i = 0;
+        for (String query : queries) {
+            if (i == queries.length - 1)
+                break;
+            driver.run(query);
+            driver.clear();
+            i++;
+        }
+
+        String warehouse = hconf.get("hive.metastore.warehouse.dir");
+        String tableName = removeExt(resultFile.getName());
+        String directory = warehouse + "/" + tableName + "/";
+        String localDirectory = "tmp";
+
+        FileStatus[] files = dfs.listStatus(new Path(directory));
+        FileSystem lfs = null;
+        if (files == null) {
+            lfs = FileSystem.getLocal(ConfUtil.getJobConf());
+            files = lfs.listStatus(new Path(directory));
+        }
+
+        File resultDirectory = new File(localDirectory + "/" + tableName);
+        deleteDir(resultDirectory);
+        resultDirectory.mkdir();
+
+        for (FileStatus fs : files) {
+            Path src = fs.getPath();
+            if (src.getName().indexOf("crc") >= 0)
+                continue;
+
+            String destStr = localDirectory + "/" + tableName + "/" + src.getName();
+            Path dest = new Path(destStr);
+            if (lfs != null) {
+                lfs.copyToLocalFile(src, dest);
+                dfs.copyFromLocalFile(dest, new Path(directory));
+            } else
+                dfs.copyToLocalFile(src, dest);
+        }
+
+        File[] rFiles = resultDirectory.listFiles();
+        StringBuilder sb = new StringBuilder();
+        for (File r : rFiles) {
+            if (r.getName().indexOf("crc") >= 0)
+                continue;
+            readFileToString(r, sb);
+        }
+        deleteDir(resultDirectory);
+
+        StringBuilder buf = new StringBuilder();
+        readFileToString(resultFile, buf);
+        if (!equal(buf, sb)) {
+            throw new Exception("Result for " + queryFile + " changed:\n" + sw.toString());
+        }
+    }
+
+    private void deleteDir(File resultDirectory) {
+        if (resultDirectory.exists()) {
+            File[] rFiles = resultDirectory.listFiles();
+            for (File r : rFiles)
+                r.delete();
+            resultDirectory.delete();
+        }
+    }
+
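+    // Row-by-row comparison: fields containing '.' are parsed and compared as floats; everything else must match exactly.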
+    private boolean equal(StringBuilder sb1, StringBuilder sb2) {
+        String s1 = sb1.toString();
+        String s2 = sb2.toString();
+        String[] rowsOne = s1.split("\n");
+        String[] rowsTwo = s2.split("\n");
+
+        if (rowsOne.length != rowsTwo.length)
+            return false;
+
+        for (int i = 0; i < rowsOne.length; i++) {
+            String row1 = rowsOne[i];
+            String row2 = rowsTwo[i];
+
+            if (row1.equals(row2))
+                continue;
+
+            String[] fields1 = row1.split("\u0001"); // assumed: Hive's default field delimiter (ctrl-A)
+            String[] fields2 = row2.split("\u0001");
+
+            for (int j = 0; j < fields1.length; j++) {
+                if (fields1[j].equals(fields2[j])) {
+                    continue;
+                } else if (fields1[j].indexOf('.') < 0) {
+                    return false;
+                } else {
+                    Float float1 = Float.parseFloat(fields1[j]);
+                    Float float2 = Float.parseFloat(fields2[j]);
+
+                    if (Math.abs(float1 - float2) == 0)
+                        continue;
+                    else
+                        return false;
+                }
+            }
+        }
+
+        return true;
+    }
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestCase.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestCase.java
new file mode 100644
index 0000000..0e2a5d5
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestCase.java
@@ -0,0 +1,54 @@
+package edu.uci.ics.hivesterix.test.optimizer;
+
+import java.io.File;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.junit.Test;
+
+import edu.uci.ics.hivesterix.common.config.ConfUtil;
+import edu.uci.ics.hivesterix.test.base.AbstractHivesterixTestCase;
+
+public class OptimizerTestCase extends AbstractHivesterixTestCase {
+    private File resultFile;
+
+    OptimizerTestCase(File queryFile, File resultFile) {
+        super("testOptimizer", queryFile);
+        this.queryFile = queryFile;
+        this.resultFile = resultFile;
+    }
+
+    @Test
+    public void testOptimizer() throws Exception {
+        StringBuilder queryString = new StringBuilder();
+        readFileToString(queryFile, queryString);
+        String[] queries = queryString.toString().split(";");
+        StringWriter sw = new StringWriter();
+
+        HiveConf hconf = ConfUtil.getHiveConf();
+        Driver driver = new Driver(hconf, new PrintWriter(sw));
+        driver.init();
+
+        int i = 0;
+        for (String query : queries) {
+            if (i == queries.length - 1)
+                break;
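+            // create/drop/set statements must actually run; other queries are only compiled so the plan is captured by the driver's writer.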
+            if (query.toLowerCase().indexOf("create") >= 0 || query.toLowerCase().indexOf("drop") >= 0
+                    || query.toLowerCase().indexOf("set") >= 0 || query.toLowerCase().startsWith("\n\ncreate")
+                    || query.toLowerCase().startsWith("\n\ndrop") || query.toLowerCase().startsWith("\n\nset"))
+                driver.run(query);
+            else
+                driver.compile(query);
+            driver.clear();
+            i++;
+        }
+        StringBuilder buf = new StringBuilder();
+        readFileToString(resultFile, buf);
+        if (!buf.toString().equals(sw.toString())) {
+            throw new Exception("Result for " + queryFile + " changed:\n" + sw.toString());
+        }
+    }
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuitGenerator.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuitGenerator.java
new file mode 100644
index 0000000..c6b788f
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuitGenerator.java
@@ -0,0 +1,75 @@
+package edu.uci.ics.hivesterix.test.optimizer;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.util.List;
+
+import junit.framework.Test;
+import junit.framework.TestResult;
+import edu.uci.ics.hivesterix.test.base.AbstractTestSuiteClass;
+
+public class OptimizerTestSuitGenerator extends AbstractTestSuiteClass {
+    private static final String PATH_TO_QUERIES = "src/test/resources/optimizerts/queries/";
+    private static final String PATH_TO_RESULTS = "src/test/resources/optimizerts/results/";
+    private static final String PATH_TO_IGNORES = "src/test/resources/optimizerts/ignore.txt";
+
+    private static final String FILE_EXTENSION_OF_RESULTS = "plan";
+
+    public static Test suite() throws UnsupportedEncodingException, FileNotFoundException, IOException {
+        List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
+        File testData = new File(PATH_TO_QUERIES);
+        File[] queries = testData.listFiles();
+        OptimizerTestSuitGenerator testSuite = new OptimizerTestSuitGenerator();
+        // set hdfs and hyracks cluster, and load test data to hdfs
+        try {
+            testSuite.setup();
+            testSuite.loadData();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+
+        for (File qFile : queries) {
+            if (isIgnored(qFile.getName(), ignores))
+                continue;
+
+            if (qFile.isFile()) {
+                String resultFileName = aqlExtToResExt(qFile.getName());
+                File rFile = new File(PATH_TO_RESULTS + resultFileName);
+                testSuite.addTest(new OptimizerTestSuiteCaseGenerator(qFile, rFile));
+            }
+        }
+        return testSuite;
+    }
+
+    private static String aqlExtToResExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+    }
+
+    /**
+     * Runs the tests and collects their result in a TestResult.
+     */
+    @Override
+    public void run(TestResult result) {
+
+        int testCount = countTestCases();
+        for (int i = 0; i < testCount; i++) {
+            Test each = this.testAt(i);
+            if (result.shouldStop())
+                break;
+            runTest(each, result);
+        }
+
+        // cleanup hdfs and hyracks cluster
+        try {
+            cleanup();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+    }
+
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuite.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuite.java
new file mode 100644
index 0000000..8ac4e86
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuite.java
@@ -0,0 +1,53 @@
+package edu.uci.ics.hivesterix.test.optimizer;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.util.List;
+
+import junit.framework.Test;
+import edu.uci.ics.hivesterix.test.base.AbstractTestSuiteClass;
+
+public class OptimizerTestSuite extends AbstractTestSuiteClass {
+
+    private static final String PATH_TO_QUERIES = "src/test/resources/optimizerts/queries/";
+    private static final String PATH_TO_RESULTS = "src/test/resources/optimizerts/results/";
+    private static final String PATH_TO_IGNORES = "src/test/resources/optimizerts/ignore.txt";
+
+    private static final String FILE_EXTENSION_OF_RESULTS = "plan";
+
+    public static Test suite() throws UnsupportedEncodingException, FileNotFoundException, IOException {
+        List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
+        File testData = new File(PATH_TO_QUERIES);
+        File[] queries = testData.listFiles();
+        OptimizerTestSuite testSuite = new OptimizerTestSuite();
+
+        // set hdfs and hyracks cluster, and load test data to hdfs
+        try {
+            testSuite.setup();
+            testSuite.loadData();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+
+        for (File qFile : queries) {
+            if (isIgnored(qFile.getName(), ignores))
+                continue;
+
+            if (qFile.isFile() && qFile.getName().startsWith("h11_")) {
+                String resultFileName = hiveExtToResExt(qFile.getName());
+                File rFile = new File(PATH_TO_RESULTS + resultFileName);
+                testSuite.addTest(new OptimizerTestCase(qFile, rFile));
+            }
+        }
+        return testSuite;
+    }
+
+    private static String hiveExtToResExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+    }
+
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuiteCaseGenerator.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuiteCaseGenerator.java
new file mode 100644
index 0000000..ee82eb3
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuiteCaseGenerator.java
@@ -0,0 +1,51 @@
+package edu.uci.ics.hivesterix.test.optimizer;
+
+import java.io.File;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.junit.Test;
+
+import edu.uci.ics.hivesterix.common.config.ConfUtil;
+import edu.uci.ics.hivesterix.test.base.AbstractHivesterixTestCase;
+
+public class OptimizerTestSuiteCaseGenerator extends AbstractHivesterixTestCase {
+    private File resultFile;
+
+    OptimizerTestSuiteCaseGenerator(File queryFile, File resultFile) {
+        super("testOptimizer", queryFile);
+        this.queryFile = queryFile;
+        this.resultFile = resultFile;
+    }
+
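+    // Generator variant: compiles each query and overwrites the expected plan file with the plan it produces.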
+    @Test
+    public void testOptimizer() throws Exception {
+        StringBuilder queryString = new StringBuilder();
+        readFileToString(queryFile, queryString);
+        String[] queries = queryString.toString().split(";");
+        StringWriter sw = new StringWriter();
+
+        HiveConf hconf = ConfUtil.getHiveConf();
+        Driver driver = new Driver(hconf, new PrintWriter(sw));
+        driver.init();
+
+        int i = 0;
+        for (String query : queries) {
+            if (i == queries.length - 1)
+                break;
+            if (query.toLowerCase().indexOf("create") >= 0 || query.toLowerCase().indexOf("drop") >= 0
+                    || query.toLowerCase().indexOf("set") >= 0 || query.toLowerCase().startsWith("\n\ncreate")
+                    || query.toLowerCase().startsWith("\n\ndrop") || query.toLowerCase().startsWith("\n\nset"))
+                driver.run(query);
+            else
+                driver.compile(query);
+            driver.clear();
+            i++;
+        }
+        sw.close();
+        writeStringToFile(resultFile, sw);
+    }
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestCase.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestCase.java
new file mode 100644
index 0000000..fdc4c68
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestCase.java
@@ -0,0 +1,148 @@
+package edu.uci.ics.hivesterix.test.runtimefunction;
+
+import java.io.File;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.junit.Test;
+
+import edu.uci.ics.hivesterix.common.config.ConfUtil;
+import edu.uci.ics.hivesterix.test.base.AbstractHivesterixTestCase;
+
+public class RuntimeFunctionTestCase extends AbstractHivesterixTestCase {
+    private File resultFile;
+    private FileSystem dfs;
+
+    RuntimeFunctionTestCase(File queryFile, File resultFile) {
+        super("testRuntimeFunction", queryFile);
+        this.queryFile = queryFile;
+        this.resultFile = resultFile;
+    }
+
+    @Test
+    public void testRuntimeFunction() throws Exception {
+        StringBuilder queryString = new StringBuilder();
+        readFileToString(queryFile, queryString);
+        String[] queries = queryString.toString().split(";");
+        StringWriter sw = new StringWriter();
+
+        HiveConf hconf = ConfUtil.getHiveConf();
+        Driver driver = new Driver(hconf, new PrintWriter(sw));
+        driver.init();
+        // Driver driver = new Driver(hconf);
+
+        dfs = FileSystem.get(ConfUtil.getJobConf());
+
+        int i = 0;
+        for (String query : queries) {
+            if (i == queries.length - 1)
+                break;
+            driver.run(query);
+            driver.clear();
+            i++;
+        }
+
+        String warehouse = hconf.get("hive.metastore.warehouse.dir");
+        String tableName = removeExt(resultFile.getName());
+        String directory = warehouse + "/" + tableName + "/";
+        String localDirectory = "tmp";
+
+        FileStatus[] files = dfs.listStatus(new Path(directory));
+        FileSystem lfs = null;
+        if (files == null) {
+            lfs = FileSystem.getLocal(ConfUtil.getJobConf());
+            files = lfs.listStatus(new Path(directory));
+        }
+
+        File resultDirectory = new File(localDirectory + "/" + tableName);
+        deleteDir(resultDirectory);
+        resultDirectory.mkdir();
+
+        for (FileStatus fs : files) {
+            Path src = fs.getPath();
+            if (src.getName().indexOf("crc") >= 0)
+                continue;
+
+            String destStr = localDirectory + "/" + tableName + "/" + src.getName();
+            Path dest = new Path(destStr);
+            if (lfs != null) {
+                lfs.copyToLocalFile(src, dest);
+                dfs.copyFromLocalFile(dest, new Path(directory));
+            } else
+                dfs.copyToLocalFile(src, dest);
+        }
+
+        File[] rFiles = resultDirectory.listFiles();
+        StringBuilder sb = new StringBuilder();
+        for (File r : rFiles) {
+            if (r.getName().indexOf("crc") >= 0)
+                continue;
+            readFileToString(r, sb);
+        }
+
+        StringBuilder buf = new StringBuilder();
+        readFileToString(resultFile, buf);
+        StringBuffer errorMsg = new StringBuffer();
+        if (!equal(buf, sb, errorMsg)) {
+            throw new Exception("Result for " + queryFile + " changed:\n" + errorMsg.toString());
+        }
+        deleteDir(resultDirectory);
+    }
+
+    private void deleteDir(File resultDirectory) {
+        if (resultDirectory.exists()) {
+            File[] rFiles = resultDirectory.listFiles();
+            for (File r : rFiles)
+                r.delete();
+            resultDirectory.delete();
+        }
+    }
+
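+    // Row-by-row comparison: fields containing '.' are compared as floats; the first mismatch is recorded in errorMsg.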
+    private boolean equal(StringBuilder sb1, StringBuilder sb2, StringBuffer errorMsg) {
+        String s1 = sb1.toString();
+        String s2 = sb2.toString();
+        String[] rowsOne = s1.split("\n");
+        String[] rowsTwo = s2.split("\n");
+
+        if (rowsOne.length != rowsTwo.length)
+            return false;
+
+        for (int i = 0; i < rowsOne.length; i++) {
+            String row1 = rowsOne[i];
+            String row2 = rowsTwo[i];
+
+            if (row1.equals(row2))
+                continue;
+
+            String[] fields1 = row1.split("\u0001"); // assumed: Hive's default field delimiter (ctrl-A)
+            String[] fields2 = row2.split("\u0001");
+
+            for (int j = 0; j < fields1.length; j++) {
+                if (fields1[j].equals(fields2[j])) {
+                    continue;
+                } else if (fields1[j].indexOf('.') < 0) {
+                    errorMsg.append("line " + i + " column " + j + ": " + fields2[j] + " expected " + fields1[j]);
+                    return false;
+                } else {
+                    Float float1 = Float.parseFloat(fields1[j]);
+                    Float float2 = Float.parseFloat(fields2[j]);
+
+                    if (Math.abs(float1 - float2) == 0)
+                        continue;
+                    else {
+                        errorMsg.append("line " + i + " column " + j + ": " + fields2[j] + " expected " + fields1[j]);
+                        return false;
+                    }
+                }
+            }
+        }
+
+        return true;
+    }
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuite.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuite.java
new file mode 100644
index 0000000..9610497
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuite.java
@@ -0,0 +1,74 @@
+package edu.uci.ics.hivesterix.test.runtimefunction;
+
+import java.io.File;
+import java.util.List;
+
+import junit.framework.Test;
+import junit.framework.TestResult;
+import edu.uci.ics.hivesterix.test.base.AbstractTestSuiteClass;
+
+public class RuntimeFunctionTestSuite extends AbstractTestSuiteClass {
+
+    private static final String PATH_TO_QUERIES = "src/test/resources/runtimefunctionts/queries/";
+    private static final String PATH_TO_RESULTS = "src/test/resources/runtimefunctionts/results/";
+    private static final String PATH_TO_IGNORES = "src/test/resources/runtimefunctionts/ignore.txt";
+
+    private static final String FILE_EXTENSION_OF_RESULTS = "result";
+
+    public static Test suite() throws Exception {
+        List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
+        File testData = new File(PATH_TO_QUERIES);
+        File[] queries = testData.listFiles();
+        RuntimeFunctionTestSuite testSuite = new RuntimeFunctionTestSuite();
+
+        // set hdfs and hyracks cluster, and load test data to hdfs
+        try {
+            testSuite.setup();
+            testSuite.loadData();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+
+        for (File qFile : queries) {
+            if (isIgnored(qFile.getName(), ignores))
+                continue;
+
+            if (qFile.isFile()) {
+                String resultFileName = hiveExtToResExt(qFile.getName());
+                File rFile = new File(PATH_TO_RESULTS + resultFileName);
+                testSuite.addTest(new RuntimeFunctionTestCase(qFile, rFile));
+            }
+        }
+        return testSuite;
+    }
+
+    private static String hiveExtToResExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+    }
+
+    /**
+     * Runs the tests and collects their result in a TestResult.
+     */
+    @Override
+    public void run(TestResult result) {
+
+        int testCount = countTestCases();
+        for (int i = 0; i < testCount; i++) {
+            Test each = this.testAt(i);
+            if (result.shouldStop())
+                break;
+            runTest(each, result);
+        }
+
+        // cleanup hdfs and hyracks cluster
+        try {
+            cleanup();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+    }
+
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteCaseGenerator.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteCaseGenerator.java
new file mode 100644
index 0000000..a416759
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteCaseGenerator.java
@@ -0,0 +1,100 @@
+package edu.uci.ics.hivesterix.test.runtimefunction;
+
+import java.io.File;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Driver;
+import org.junit.Test;
+
+import edu.uci.ics.hivesterix.common.config.ConfUtil;
+import edu.uci.ics.hivesterix.test.base.AbstractHivesterixTestCase;
+
+public class RuntimeFunctionTestSuiteCaseGenerator extends AbstractHivesterixTestCase {
+    private File resultFile;
+    private FileSystem dfs;
+
+    RuntimeFunctionTestSuiteCaseGenerator(File queryFile, File resultFile) {
+        super("testRuntimeFunction", queryFile);
+        this.queryFile = queryFile;
+        this.resultFile = resultFile;
+    }
+
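+    // Generator variant: runs each query and writes the actual output back into the expected-results file.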
+    @Test
+    public void testRuntimeFunction() throws Exception {
+        StringBuilder queryString = new StringBuilder();
+        readFileToString(queryFile, queryString);
+        String[] queries = queryString.toString().split(";");
+        StringWriter sw = new StringWriter();
+
+        HiveConf hconf = ConfUtil.getHiveConf();
+        Driver driver = new Driver(hconf, new PrintWriter(sw));
+        driver.init();
+
+        dfs = FileSystem.get(ConfUtil.getJobConf());
+
+        int i = 0;
+        for (String query : queries) {
+            if (i == queries.length - 1)
+                break;
+            driver.run(query);
+            driver.clear();
+            i++;
+        }
+
+        String warehouse = hconf.get("hive.metastore.warehouse.dir");
+        String tableName = removeExt(resultFile.getName());
+        String directory = warehouse + "/" + tableName + "/";
+        String localDirectory = "tmp";
+
+        FileStatus[] files = dfs.listStatus(new Path(directory));
+        FileSystem lfs = null;
+        if (files == null) {
+            lfs = FileSystem.getLocal(ConfUtil.getJobConf());
+            files = lfs.listStatus(new Path(directory));
+        }
+
+        File resultDirectory = new File(localDirectory + "/" + tableName);
+        deleteDir(resultDirectory);
+        resultDirectory.mkdir();
+
+        for (FileStatus fs : files) {
+            Path src = fs.getPath();
+            if (src.getName().indexOf("crc") >= 0)
+                continue;
+
+            String destStr = localDirectory + "/" + tableName + "/" + src.getName();
+            Path dest = new Path(destStr);
+            if (lfs != null) {
+                lfs.copyToLocalFile(src, dest);
+                dfs.copyFromLocalFile(dest, new Path(directory));
+            } else
+                dfs.copyToLocalFile(src, dest);
+        }
+
+        File[] rFiles = resultDirectory.listFiles();
+        StringBuilder sb = new StringBuilder();
+        for (File r : rFiles) {
+            if (r.getName().indexOf("crc") >= 0)
+                continue;
+            readFileToString(r, sb);
+        }
+        deleteDir(resultDirectory);
+
+        writeStringToFile(resultFile, sb);
+    }
+
+    private void deleteDir(File resultDirectory) {
+        if (resultDirectory.exists()) {
+            File[] rFiles = resultDirectory.listFiles();
+            for (File r : rFiles)
+                r.delete();
+            resultDirectory.delete();
+        }
+    }
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteGenerator.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteGenerator.java
new file mode 100644
index 0000000..ca2bd6d
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/runtimefunction/RuntimeFunctionTestSuiteGenerator.java
@@ -0,0 +1,74 @@
+package edu.uci.ics.hivesterix.test.runtimefunction;
+
+import java.io.File;
+import java.util.List;
+
+import junit.framework.Test;
+import junit.framework.TestResult;
+import edu.uci.ics.hivesterix.test.base.AbstractTestSuiteClass;
+
+public class RuntimeFunctionTestSuiteGenerator extends AbstractTestSuiteClass {
+
+    private static final String PATH_TO_QUERIES = "src/test/resources/runtimefunctionts/queries/";
+    private static final String PATH_TO_RESULTS = "src/test/resources/runtimefunctionts/results/";
+    private static final String PATH_TO_IGNORES = "src/test/resources/runtimefunctionts/ignore.txt";
+
+    private static final String FILE_EXTENSION_OF_RESULTS = "result";
+
+    public static Test suite() throws Exception {
+        List<String> ignores = getIgnoreList(PATH_TO_IGNORES);
+        File testData = new File(PATH_TO_QUERIES);
+        File[] queries = testData.listFiles();
+        RuntimeFunctionTestSuiteGenerator testSuite = new RuntimeFunctionTestSuiteGenerator();
+
+        // set hdfs and hyracks cluster, and load test data to hdfs
+        try {
+            testSuite.setup();
+            testSuite.loadData();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+
+        for (File qFile : queries) {
+            if (isIgnored(qFile.getName(), ignores))
+                continue;
+
+            if (qFile.isFile() && qFile.getName().startsWith("q16_")) {
+                String resultFileName = hiveExtToResExt(qFile.getName());
+                File rFile = new File(PATH_TO_RESULTS + resultFileName);
+                testSuite.addTest(new RuntimeFunctionTestSuiteCaseGenerator(qFile, rFile));
+            }
+        }
+        return testSuite;
+    }
+
+    private static String hiveExtToResExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+    }
+
+    /**
+     * Runs the tests and collects their result in a TestResult.
+     */
+    @Override
+    public void run(TestResult result) {
+
+        int testCount = countTestCases();
+        for (int i = 0; i < testCount; i++) {
+            Test each = this.testAt(i);
+            if (result.shouldStop())
+                break;
+            runTest(each, result);
+        }
+
+        // cleanup hdfs and hyracks cluster
+        try {
+            cleanup();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IllegalStateException(e.getMessage());
+        }
+    }
+
+}
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/serde/SerDeTest.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/serde/SerDeTest.java
new file mode 100644
index 0000000..cd39c5a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/serde/SerDeTest.java
@@ -0,0 +1,219 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.test.serde;
+
+import java.util.List;
+import java.util.Properties;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazySerDe;
+
+/**
+ * TestLazySimpleSerDe.
+ */
+@SuppressWarnings({ "deprecation", "rawtypes" })
+public class SerDeTest extends TestCase {
+
+    /**
+     * Test the LazySimpleSerDe class.
+     */
+    public void testLazySimpleSerDe() throws Throwable {
+        try {
+            // Create the SerDe
+            LazySimpleSerDe serDe = new LazySimpleSerDe();
+            Configuration conf = new Configuration();
+            Properties tbl = createProperties();
+            serDe.initialize(conf, tbl);
+
+            LazySerDe outputSerde = new LazySerDe();
+            outputSerde.initialize(conf, tbl);
+
+            // Data
+            String s = "123\t456\t789\t1000\t5.3\thive and hadoop\t1\tqf";
+
+            byte[] bytes = s.getBytes();
+            Writable bytesWritable = new BytesWritable(bytes);
+
+            // Test
+            // deserializeAndSerialize(serDe, t, s, expectedFieldsData);
+            Object row = serDe.deserialize(bytesWritable); // test my serde
+            StructObjectInspector simpleInspector = (StructObjectInspector) serDe.getObjectInspector();
+            List<Object> fields = simpleInspector.getStructFieldsDataAsList(row);
+            List<? extends StructField> fieldRefs = simpleInspector.getAllStructFieldRefs();
+
+            int i = 0;
+            for (Object field : fields) {
+                BytesWritable fieldWritable = (BytesWritable) outputSerde.serialize(field, fieldRefs.get(i)
+                        .getFieldObjectInspector());
+                System.out.print(fieldWritable.getSize() + "|");
+                i++;
+            }
+
+            // Writable output = outputSerde.serialize(row, serDe
+            // .getObjectInspector());
+            // System.out.println(output);
+            //
+            // Object row2 = outputSerde.deserialize(output);
+            // Writable output2 = serDe.serialize(row2, outputSerde
+            // .getObjectInspector());
+            // System.out.println(output2);
+
+            // System.out.println(output);
+            // deserializeAndSerialize(outputSerde, t, s, expectedFieldsData);
+
+        } catch (Throwable e) {
+            e.printStackTrace();
+            throw e;
+        }
+    }
+
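+    // Round-trip helper: deserialize the text row, verify every field, then serialize and compare against the original string.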
+    private void deserializeAndSerialize(SerDe serDe, Text t, String s, Object[] expectedFieldsData)
+            throws SerDeException {
+        // Get the row structure
+        StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
+        List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
+        assertEquals(8, fieldRefs.size());
+
+        // Deserialize
+        Object row = serDe.deserialize(t);
+        for (int i = 0; i < fieldRefs.size(); i++) {
+            Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
+            if (fieldData != null) {
+                fieldData = ((LazyPrimitive) fieldData).getWritableObject();
+            }
+            assertEquals("Field " + i, expectedFieldsData[i], fieldData);
+        }
+        // Serialize
+        assertEquals(Text.class, serDe.getSerializedClass());
+        Text serializedText = (Text) serDe.serialize(row, oi);
+        assertEquals("Serialized data", s, serializedText.toString());
+    }
+
+    private Properties createProperties() {
+        Properties tbl = new Properties();
+
+        // Set the configuration parameters
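+        // serialization format "9" selects ASCII code 9 (tab) as the field separator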
+        tbl.setProperty(Constants.SERIALIZATION_FORMAT, "9");
+        tbl.setProperty("columns", "abyte,ashort,aint,along,adouble,astring,anullint,anullstring");
+        tbl.setProperty("columns.types", "tinyint:smallint:int:bigint:double:string:int:string");
+        tbl.setProperty(Constants.SERIALIZATION_NULL_FORMAT, "NULL");
+        return tbl;
+    }
+
+    /**
+     * Test the LazySimpleSerDe class with LastColumnTakesRest option.
+     */
+    public void testLazySimpleSerDeLastColumnTakesRest() throws Throwable {
+        try {
+            // Create the SerDe
+            LazySimpleSerDe serDe = new LazySimpleSerDe();
+            Configuration conf = new Configuration();
+            Properties tbl = createProperties();
+            tbl.setProperty(Constants.SERIALIZATION_LAST_COLUMN_TAKES_REST, "true");
+            serDe.initialize(conf, tbl);
+
+            // Data
+            Text t = new Text("123\t456\t789\t1000\t5.3\thive and hadoop\t1.\ta\tb\t");
+            String s = "123\t456\t789\t1000\t5.3\thive and hadoop\tNULL\ta\tb\t";
+            Object[] expectedFieldsData = { new ByteWritable((byte) 123), new ShortWritable((short) 456),
+                    new IntWritable(789), new LongWritable(1000), new DoubleWritable(5.3), new Text("hive and hadoop"),
+                    null, new Text("a\tb\t") };
+
+            // Test
+            deserializeAndSerialize(serDe, t, s, expectedFieldsData);
+
+        } catch (Throwable e) {
+            e.printStackTrace();
+            throw e;
+        }
+    }
+
+    /**
+     * Test the LazySimpleSerDe class with extra columns.
+     */
+    public void testLazySimpleSerDeExtraColumns() throws Throwable {
+        try {
+            // Create the SerDe
+            LazySimpleSerDe serDe = new LazySimpleSerDe();
+            Configuration conf = new Configuration();
+            Properties tbl = createProperties();
+            serDe.initialize(conf, tbl);
+
+            // Data
+            Text t = new Text("123\t456\t789\t1000\t5.3\thive and hadoop\t1.\ta\tb\t");
+            String s = "123\t456\t789\t1000\t5.3\thive and hadoop\tNULL\ta";
+            Object[] expectedFieldsData = { new ByteWritable((byte) 123), new ShortWritable((short) 456),
+                    new IntWritable(789), new LongWritable(1000), new DoubleWritable(5.3), new Text("hive and hadoop"),
+                    null, new Text("a") };
+
+            // Test
+            deserializeAndSerialize(serDe, t, s, expectedFieldsData);
+
+        } catch (Throwable e) {
+            e.printStackTrace();
+            throw e;
+        }
+    }
+
+    /**
+     * Test the LazySimpleSerDe class with missing columns.
+     */
+    public void testLazySimpleSerDeMissingColumns() throws Throwable {
+        try {
+            // Create the SerDe
+            LazySimpleSerDe serDe = new LazySimpleSerDe();
+            Configuration conf = new Configuration();
+            Properties tbl = createProperties();
+            serDe.initialize(conf, tbl);
+
+            // Data
+            Text t = new Text("123\t456\t789\t1000\t5.3\t");
+            String s = "123\t456\t789\t1000\t5.3\t\tNULL\tNULL";
+            Object[] expectedFieldsData = { new ByteWritable((byte) 123), new ShortWritable((short) 456),
+                    new IntWritable(789), new LongWritable(1000), new DoubleWritable(5.3), new Text(""), null, null };
+
+            // Test
+            deserializeAndSerialize(serDe, t, s, expectedFieldsData);
+
+        } catch (Throwable e) {
+            e.printStackTrace();
+            throw e;
+        }
+    }
+
+}
diff --git a/hivesterix/hivesterix-dist/src/test/resources/log4j.properties b/hivesterix/hivesterix-dist/src/test/resources/log4j.properties
new file mode 100755
index 0000000..d5e6004
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/log4j.properties
@@ -0,0 +1,94 @@
+# Define some default values that can be overridden by system properties
+hadoop.root.logger=FATAL,console
+hadoop.log.dir=.
+hadoop.log.file=hadoop.log
+
+# Define the root logger to the system property "hadoop.root.logger".
+log4j.rootLogger=${hadoop.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshold=FATAL
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Rollover at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to the root logger above if you want to use this
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#
+# TaskLog Appender
+#
+
+#Default values
+hadoop.tasklog.taskid=null
+hadoop.tasklog.noKeepSplits=4
+hadoop.tasklog.totalLogFileSize=100
+hadoop.tasklog.purgeLogSplits=true
+hadoop.tasklog.logsRetainHours=12
+
+log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
+log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
+log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
+
+log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
+log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+#
+# Rolling File Appender
+#
+
+#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
+#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Logfile size and 30-day backups
+#log4j.appender.RFA.MaxFileSize=1MB
+#log4j.appender.RFA.MaxBackupIndex=30
+
+#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+#
+# FSNamesystem Audit logging
+# All audit events are logged at INFO level
+#
+log4j.logger.org.apache.hadoop.fs.FSNamesystem.audit=WARN
+
+# Custom Logging levels
+
+#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
+#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
+#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+
+# Jets3t library
+log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
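
Because log4j substitutes ${hadoop.root.logger} and the other placeholders
from system properties before falling back to the values defined in this file,
the FATAL,console default can be overridden per run without editing the file.
A minimal sketch (log4j 1.x; the file path is an assumed example):

    import org.apache.log4j.Logger;
    import org.apache.log4j.PropertyConfigurator;

    public class LogSetup {
        public static void main(String[] args) {
            // Equivalent to passing -Dhadoop.root.logger=INFO,console to the JVM.
            System.setProperty("hadoop.root.logger", "INFO,console");
            PropertyConfigurator.configure("src/test/resources/log4j.properties");
            Logger.getLogger(LogSetup.class).info("visible at INFO after the override");
        }
    }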
diff --git a/hivesterix/hivesterix-dist/src/test/resources/logging.properties b/hivesterix/hivesterix-dist/src/test/resources/logging.properties
new file mode 100644
index 0000000..1cc34e1
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/logging.properties
@@ -0,0 +1,65 @@
+############################################################
+#  	Default Logging Configuration File
+#
+# You can use a different file by specifying a filename
+# with the java.util.logging.config.file system property.  
+# For example java -Djava.util.logging.config.file=myfile
+############################################################
+
+############################################################
+#  	Global properties
+############################################################
+
+# "handlers" specifies a comma separated list of log Handler 
+# classes.  These handlers will be installed during VM startup.
+# Note that these classes must be on the system classpath.
+# By default we only configure a ConsoleHandler, which will only
+# show messages at the INFO and above levels.
+
+handlers= java.util.logging.ConsoleHandler
+
+# To also add the FileHandler, use the following line instead.
+
+# handlers= java.util.logging.FileHandler, java.util.logging.ConsoleHandler
+
+# Default global logging level.
+# This specifies which kinds of events are logged across
+# all loggers.  For any given facility this global level
+# can be overridden by a facility-specific level.
+# Note that the ConsoleHandler also has a separate level
+# setting to limit messages printed to the console.
+
+.level= WARNING
+# .level= INFO
+# .level= FINE
+# .level = FINEST
+
+############################################################
+# Handler specific properties.
+# Describes specific configuration info for Handlers.
+############################################################
+
+# default file output is in user's home directory.
+
+# java.util.logging.FileHandler.pattern = %h/java%u.log
+# java.util.logging.FileHandler.limit = 50000
+# java.util.logging.FileHandler.count = 1
+# java.util.logging.FileHandler.formatter = java.util.logging.XMLFormatter
+
+# Limit the messages that are printed on the console to FINE and above.
+
+java.util.logging.ConsoleHandler.level = FINE
+java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter
+
+
+############################################################
+# Facility specific properties.
+# Provides extra control for each logger.
+############################################################
+
+# For example, set the com.xyz.foo logger to only log SEVERE
+# messages:
+
+edu.uci.ics.asterix.level = WARNING
+edu.uci.ics.algebricks.level = WARNING
+edu.uci.ics.hyracks.level = WARNING
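
This java.util.logging configuration is normally picked up via
-Djava.util.logging.config.file, but it can also be loaded programmatically.
A small sketch, assuming the file sits at the path shown, demonstrating the
facility-specific WARNING levels defined above:

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.logging.Level;
    import java.util.logging.LogManager;
    import java.util.logging.Logger;

    public class JulSetup {
        public static void main(String[] args) throws IOException {
            try (FileInputStream in =
                    new FileInputStream("src/test/resources/logging.properties")) {
                LogManager.getLogManager().readConfiguration(in);
            }
            Logger hyracks = Logger.getLogger("edu.uci.ics.hyracks");
            hyracks.info("suppressed: INFO is below the WARNING cap");
            hyracks.log(Level.WARNING, "emitted: at the WARNING cap");
        }
    }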
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/hive/conf/hive-default.xml b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/hive/conf/hive-default.xml
new file mode 100644
index 0000000..d5d0149
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/hive/conf/hive-default.xml
@@ -0,0 +1,775 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+	<!-- Hive Configuration can either be stored in this file or in the hadoop 
+		configuration files -->
+	<!-- that are implied by Hadoop setup variables. -->
+	<!-- Aside from Hadoop setup variables - this file is provided as a convenience 
+		so that Hive -->
+	<!-- users do not have to edit hadoop configuration files (that may be managed 
+		as a centralized -->
+	<!-- resource). -->
+
+	<!-- Hive Execution Parameters -->
+	<property>
+		<name>mapred.reduce.tasks</name>
+		<value>-1</value>
+		<description>The default number of reduce tasks per job. Typically set
+			to a prime close to the number of available hosts. Ignored when
+			mapred.job.tracker is "local". Hadoop sets this to 1 by default,
+			whereas Hive uses -1 as its default value. By setting this property
+			to -1, Hive will automatically figure out the number of reducers.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hyracks.host</name>
+		<value>127.0.0.1</value>
+	</property>
+
+	<property>
+		<name>hive.hyracks.port</name>
+		<value>13099</value>
+	</property>
+
+	<property>
+		<name>hive.hyracks.app</name>
+		<value>hivesterix</value>
+	</property>
+
+
+	<property>
+		<name>hive.hyracks.parrallelism</name>
+		<value>2</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.groupby.external</name>
+		<value>true</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.groupby.external.memory</name>
+		<value>3072</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.sort.memory</name>
+		<value>3072</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.framesize</name>
+		<value>768</value>
+	</property>
+
+	<property>
+		<name>hive.exec.reducers.bytes.per.reducer</name>
+		<value>1000000000</value>
+		<description>Size per reducer. The default is 1G, i.e. if the input
+			size is 10G, it will use 10 reducers.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.reducers.max</name>
+		<value>999</value>
+		<description>The maximum number of reducers that will be used. If the
+			value specified in the configuration parameter mapred.reduce.tasks
+			is negative, Hive will use this as the maximum number of reducers
+			when automatically determining the number of reducers.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.scratchdir</name>
+		<value>/tmp/hive-${user.name}</value>
+		<description>Scratch space for Hive jobs</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode</name>
+		<value>false</value>
+		<description>Whether Hive is running in test mode. If yes, it turns on
+			sampling and prefixes the output table name.</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode.prefix</name>
+		<value>test_</value>
+		<description>if hive is running in test mode, prefixes the output
+			table by this string</description>
+	</property>
+
+	<!-- If the input table is not bucketed, the denominator of the tablesample
+		is determined by the parameter below -->
+	<!-- For example, the following query: -->
+	<!-- INSERT OVERWRITE TABLE dest -->
+	<!-- SELECT col1 from src -->
+	<!-- would be converted to -->
+	<!-- INSERT OVERWRITE TABLE test_dest -->
+	<!-- SELECT col1 from src TABLESAMPLE (BUCKET 1 out of 32 on rand(1)) -->
+	<property>
+		<name>hive.test.mode.samplefreq</name>
+		<value>32</value>
+		<description>if hive is running in test mode and table is not
+			bucketed, sampling frequency</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode.nosamplelist</name>
+		<value></value>
+		<description>if hive is running in test mode, don't sample the above
+			comma-separated list of tables</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.local</name>
+		<value>true</value>
+		<description>controls whether to connect to a remote metastore server
+			or open a new metastore server in the Hive Client JVM</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionURL</name>
+		<value>jdbc:derby:;databaseName=metastore_db;create=true</value>
+		<description>JDBC connect string for a JDBC metastore</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionDriverName</name>
+		<value>org.apache.derby.jdbc.EmbeddedDriver</value>
+		<description>Driver class name for a JDBC metastore</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.PersistenceManagerFactoryClass</name>
+		<value>org.datanucleus.jdo.JDOPersistenceManagerFactory</value>
+		<description>class implementing the jdo persistence</description>
+	</property>
+
+	<property>
+		<name>datanucleus.connectionPoolingType</name>
+		<value>DBCP</value>
+		<description>Uses a DBCP connection pool for JDBC metastore
+		</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.DetachAllOnCommit</name>
+		<value>true</value>
+		<description>detaches all objects from session so that they can be
+			used after transaction is committed</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.NonTransactionalRead</name>
+		<value>true</value>
+		<description>reads outside of transactions</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionUserName</name>
+		<value>APP</value>
+		<description>username to use against metastore database</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionPassword</name>
+		<value>mine</value>
+		<description>password to use against metastore database</description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateTables</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema </description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateColumns</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema </description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateConstraints</name>
+		<value>false</value>
+		<description>validates existing schema against code. turn this on if
+			you want to verify existing schema </description>
+	</property>
+
+	<property>
+		<name>datanucleus.storeManagerType</name>
+		<value>rdbms</value>
+		<description>metadata store type</description>
+	</property>
+
+	<property>
+		<name>datanucleus.autoCreateSchema</name>
+		<value>true</value>
+		<description>creates necessary schema on startup if one doesn't
+			exist. Set this to false after creating it once.</description>
+	</property>
+
+	<property>
+		<name>datanucleus.autoStartMechanismMode</name>
+		<value>checked</value>
+		<description>throw exception if metadata tables are incorrect
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.transactionIsolation</name>
+		<value>read-committed</value>
+		<description>Default transaction isolation level for identity
+			generation. </description>
+	</property>
+
+	<property>
+		<name>datanucleus.cache.level2</name>
+		<value>false</value>
+		<description>Use a level 2 cache. Turn this off if metadata is changed
+			independently of hive metastore server</description>
+	</property>
+
+	<property>
+		<name>datanucleus.cache.level2.type</name>
+		<value>SOFT</value>
+		<description>SOFT=soft reference based cache, WEAK=weak reference
+			based cache.</description>
+	</property>
+
+	<property>
+		<name>datanucleus.identifierFactory</name>
+		<value>datanucleus</value>
+		<description>Name of the identifier factory to use when generating
+			table/column names etc. 'datanucleus' is used for backward
+			compatibility</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.warehouse.dir</name>
+		<value>/tmp/hivesterix</value>
+		<description>location of default database for the warehouse
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.connect.retries</name>
+		<value>5</value>
+		<description>Number of retries while opening a connection to metastore
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.rawstore.impl</name>
+		<value>org.apache.hadoop.hive.metastore.ObjectStore</value>
+		<description>Name of the class that implements the
+			org.apache.hadoop.hive.metastore.rawstore interface. This class is
+			used for storage and retrieval of raw metadata objects such as
+			tables and databases.</description>
+	</property>
+
+	<property>
+		<name>hive.default.fileformat</name>
+		<value>TextFile</value>
+		<description>Default file format for CREATE TABLE statement. Options
+			are TextFile and SequenceFile. Users can explicitly say CREATE TABLE
+			... STORED AS &lt;TEXTFILE|SEQUENCEFILE&gt; to override</description>
+	</property>
+
+	<property>
+		<name>hive.fileformat.check</name>
+		<value>true</value>
+		<description>Whether to check file format or not when loading data
+			files</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr</name>
+		<value>true</value>
+		<description>Whether to use map-side aggregation in Hive Group By
+			queries</description>
+	</property>
+
+	<property>
+		<name>hive.groupby.skewindata</name>
+		<value>false</value>
+		<description>Whether there is skew in data to optimize group by
+			queries</description>
+	</property>
+
+	<property>
+		<name>hive.groupby.mapaggr.checkinterval</name>
+		<value>100000</value>
+		<description>Number of rows after which the size of the grouping
+			keys/aggregation classes is checked</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.local.mem</name>
+		<value>0</value>
+		<description>For local mode, memory of the mappers/reducers
+		</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr.hash.percentmemory</name>
+		<value>0.5</value>
+		<description>Portion of total memory to be used by map-side group
+			aggregation hash table</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr.hash.min.reduction</name>
+		<value>0.5</value>
+		<description>Hash aggregation will be turned off if the ratio between
+			hash table size and input rows is bigger than this number. Set to 1
+			to make sure hash aggregation is never turned off.</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.cp</name>
+		<value>true</value>
+		<description>Whether to enable column pruner</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.ppd</name>
+		<value>true</value>
+		<description>Whether to enable predicate pushdown</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.pruner</name>
+		<value>true</value>
+		<description>Whether to enable the new partition pruner which depends
+			on predicate pushdown. If this is disabled, the old partition
+			pruner, which is based on the AST, will be enabled.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.groupby</name>
+		<value>true</value>
+		<description>Whether to enable the bucketed group by from bucketed
+			partitions/tables.</description>
+	</property>
+
+	<property>
+		<name>hive.join.emit.interval</name>
+		<value>1000</value>
+		<description>How many rows in the right-most join operand Hive should
+			buffer before emitting the join result. </description>
+	</property>
+
+	<property>
+		<name>hive.join.cache.size</name>
+		<value>25000</value>
+		<description>How many rows in the joining tables (except the streaming
+			table) should be cached in memory. </description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.bucket.cache.size</name>
+		<value>100</value>
+		<description>How many values for each key in the map-joined table
+			should be cached in memory.</description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.maxsize</name>
+		<value>100000</value>
+		<description>Maximum # of rows of the small table that can be handled
+			by map-side join. If the size is reached and hive.task.progress is
+			set, a fatal error counter is set and the job will be killed.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.cache.numrows</name>
+		<value>25000</value>
+		<description>How many rows should be cached by jdbm for map join.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.skewjoin</name>
+		<value>false</value>
+		<description>Whether to enable skew join optimization. </description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.key</name>
+		<value>100000</value>
+		<description>Determine if we get a skew key in join. If we see more
+			than the specified number of rows with the same key in the join
+			operator, we treat the key as a skew join key.</description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.mapjoin.map.tasks</name>
+		<value>10000</value>
+		<description>Determine the number of map tasks used in the follow-up
+			map join job for a skew join. It should be used together with
+			hive.skewjoin.mapjoin.min.split to perform fine-grained control.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.mapjoin.min.split</name>
+		<value>33554432</value>
+		<description>Determine the maximum number of map tasks used in the
+			follow-up map join job for a skew join by specifying the minimum
+			split size. It should be used together with
+			hive.skewjoin.mapjoin.map.tasks to perform fine-grained control.</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.mode</name>
+		<value>nonstrict</value>
+		<description>The mode in which the hive operations are being
+			performed. In strict mode, some risky queries are not allowed to run
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.script.maxerrsize</name>
+		<value>100000</value>
+		<description>Maximum number of bytes a script is allowed to emit to
+			standard error (per map-reduce task). This prevents runaway scripts
+			from filling log partitions to capacity</description>
+	</property>
+
+	<property>
+		<name>hive.exec.script.allow.partial.consumption</name>
+		<value>false</value>
+		<description> When enabled, this option allows a user script to exit
+			successfully without consuming all the data from the standard input.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.script.operator.id.env.var</name>
+		<value>HIVE_SCRIPT_OPERATOR_ID</value>
+		<description> Name of the environment variable that holds the unique
+			script operator ID in the user's transform function (the custom
+			mapper/reducer that the user has specified in the query)
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.compress.output</name>
+		<value>false</value>
+		<description> This controls whether the final outputs of a query (to a
+			local/hdfs file or a hive table) are compressed. The compression codec
+			and other options are determined from hadoop config variables
+			mapred.output.compress* </description>
+	</property>
+
+	<property>
+		<name>hive.exec.compress.intermediate</name>
+		<value>false</value>
+		<description> This controls whether intermediate files produced by
+			hive between multiple map-reduce jobs are compressed. The compression
+			codec and other options are determined from hadoop config variables
+			mapred.output.compress* </description>
+	</property>
+
+	<property>
+		<name>hive.exec.parallel</name>
+		<value>false</value>
+		<description>Whether to execute jobs in parallel</description>
+	</property>
+
+	<property>
+		<name>hive.exec.parallel.thread.number</name>
+		<value>8</value>
+		<description>How many jobs at most can be executed in parallel
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.war.file</name>
+		<value>lib/hive-hwi-0.7.0.war</value>
+		<description>This sets the path to the HWI war file, relative to
+			${HIVE_HOME}. </description>
+	</property>
+
+	<property>
+		<name>hive.hwi.listen.host</name>
+		<value>0.0.0.0</value>
+		<description>This is the host address the Hive Web Interface will
+			listen on</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.listen.port</name>
+		<value>9999</value>
+		<description>This is the port the Hive Web Interface will listen on
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.pre.hooks</name>
+		<value></value>
+		<description>Pre Execute Hook for Tests</description>
+	</property>
+
+	<property>
+		<name>hive.merge.mapfiles</name>
+		<value>true</value>
+		<description>Merge small files at the end of a map-only job
+		</description>
+	</property>
+
+	<property>
+		<name>hive.merge.mapredfiles</name>
+		<value>false</value>
+		<description>Merge small files at the end of a map-reduce job
+		</description>
+	</property>
+
+	<property>
+		<name>hive.heartbeat.interval</name>
+		<value>1000</value>
+		<description>Send a heartbeat after this interval - used by mapjoin
+			and filter operators</description>
+	</property>
+
+	<property>
+		<name>hive.merge.size.per.task</name>
+		<value>256000000</value>
+		<description>Size of merged files at the end of the job</description>
+	</property>
+
+	<property>
+		<name>hive.merge.size.smallfiles.avgsize</name>
+		<value>16000000</value>
+		<description>When the average output file size of a job is less than
+			this number, Hive will start an additional map-reduce job to merge
+			the output files into bigger files. This is only done for map-only
+			jobs if hive.merge.mapfiles is true, and for map-reduce jobs if
+			hive.merge.mapredfiles is true.</description>
+	</property>
+
+	<property>
+		<name>hive.script.auto.progress</name>
+		<value>false</value>
+		<description>Whether Hive Transform/Map/Reduce clauses should
+			automatically send progress information to the TaskTracker to avoid
+			the task getting killed because of inactivity. Hive sends progress
+			information when the script is outputting to stderr. This option
+			removes the need to periodically produce stderr messages, but users
+			should be cautious because this may prevent infinite loops in the
+			scripts from being killed by the TaskTracker.</description>
+	</property>
+
+	<property>
+		<name>hive.script.serde</name>
+		<value>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</value>
+		<description>The default serde for transmitting input data to and
+			reading output data from the user scripts. </description>
+	</property>
+
+	<property>
+		<name>hive.script.recordreader</name>
+		<value>org.apache.hadoop.hive.ql.exec.TextRecordReader</value>
+		<description>The default record reader for reading data from the user
+			scripts. </description>
+	</property>
+
+	<property>
+		<name>hive.script.recordwriter</name>
+		<value>org.apache.hadoop.hive.ql.exec.TextRecordWriter</value>
+		<description>The default record writer for writing data to the user
+			scripts. </description>
+	</property>
+
+	<property>
+		<name>hive.input.format</name>
+		<value>org.apache.hadoop.hive.ql.io.HiveInputFormat</value>
+		<description>The default input format, if it is not specified, the
+			system assigns it. It is set to HiveInputFormat for hadoop versions
+			17, 18 and 19, whereas it is set to CombinedHiveInputFormat for
+			hadoop 20. The user can always override it - if there is a bug in
+			CombinedHiveInputFormat, it can always be manually set to
+			HiveInputFormat. </description>
+	</property>
+
+	<property>
+		<name>hive.udtf.auto.progress</name>
+		<value>false</value>
+		<description>Whether Hive should automatically send progress
+			information to the TaskTracker when using UDTFs to prevent the task
+			from getting killed because of inactivity. Users should be cautious
+			because this may prevent the TaskTracker from killing tasks with
+			infinite loops.</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.reduce.tasks.speculative.execution</name>
+		<value>true</value>
+		<description>Whether speculative execution for reducers should be
+			turned on. </description>
+	</property>
+
+	<property>
+		<name>hive.exec.counters.pull.interval</name>
+		<value>1000</value>
+		<description>The interval at which to poll the JobTracker for the
+			counters of the running job. The smaller it is, the more load there
+			will be on the JobTracker; the higher it is, the less granular the
+			captured counter values will be.</description>
+	</property>
+
+	<property>
+		<name>hive.enforce.bucketing</name>
+		<value>false</value>
+		<description>Whether bucketing is enforced. If true, while inserting
+			into the table, bucketing is enforced. </description>
+	</property>
+
+	<property>
+		<name>hive.enforce.sorting</name>
+		<value>false</value>
+		<description>Whether sorting is enforced. If true, while inserting
+			into the table, sorting is enforced. </description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.connection.url.hook</name>
+		<value></value>
+		<description>Name of the hook to use for retrieving the JDO connection
+			URL. If empty, the value in javax.jdo.option.ConnectionURL is used
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.retry.attempts</name>
+		<value>1</value>
+		<description>The number of times to retry a metastore call if there
+			is a connection error</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.retry.interval</name>
+		<value>1000</value>
+		<description>The number of milliseconds between metastore retry
+			attempts</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.min.threads</name>
+		<value>200</value>
+		<description>Minimum number of worker threads in the Thrift server's
+			pool.</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.max.threads</name>
+		<value>100000</value>
+		<description>Maximum number of worker threads in the Thrift server's
+			pool.</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.tcp.keepalive</name>
+		<value>true</value>
+		<description>Whether to enable TCP keepalive for the metastore server.
+			Keepalive will prevent accumulation of half-open connections.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.reducededuplication</name>
+		<value>true</value>
+		<description>Remove extra map-reduce jobs if the data is already
+			clustered by the same key which needs to be used again. This should
+			always be set to true. Since it is a new feature, it has been made
+			configurable.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.dynamic.partition</name>
+		<value>false</value>
+		<description>Whether or not to allow dynamic partitions in DML/DDL.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.dynamic.partition.mode</name>
+		<value>strict</value>
+		<description>In strict mode, the user must specify at least one static
+			partition in case the user accidentally overwrites all partitions.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.max.dynamic.partitions</name>
+		<value>1000</value>
+		<description>Maximum number of dynamic partitions allowed to be
+			created in total.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.max.dynamic.partitions.pernode</name>
+		<value>100</value>
+		<description>Maximum number of dynamic partitions allowed to be
+			created in each mapper/reducer node.</description>
+	</property>
+
+	<property>
+		<name>hive.default.partition.name</name>
+		<value>__HIVE_DEFAULT_PARTITION__</value>
+		<description>The default partition name in case the dynamic partition
+			column value is null/empty string or any other value that cannot be
+			escaped. This value must not contain any special character used in
+			HDFS URI (e.g., ':', '%', '/' etc). The user has to be aware that
+			the dynamic partition value should not contain this value to avoid
+			confusion.</description>
+	</property>
+
+	<property>
+		<name>fs.har.impl</name>
+		<value>org.apache.hadoop.hive.shims.HiveHarFileSystem</value>
+		<description>The implementation for accessing Hadoop Archives. Note
+			that this won't be applicable to Hadoop versions less than 0.20
+		</description>
+	</property>
+
+	<property>
+		<name>hive.archive.enabled</name>
+		<value>false</value>
+		<description>Whether archiving operations are permitted</description>
+	</property>
+
+	<property>
+		<name>hive.archive.har.parentdir.settable</name>
+		<value>false</value>
+		<description>In new Hadoop versions, the parent directory must be set
+			while creating a HAR. Because this functionality is hard to detect
+			with just version numbers, this conf var needs to be set manually.
+		</description>
+	</property>
+
+	<!-- HBase Storage Handler Parameters -->
+
+	<property>
+		<name>hive.hbase.wal.enabled</name>
+		<value>true</value>
+		<description>Whether writes to HBase should be forced to the
+			write-ahead log. Disabling this improves HBase write performance at
+			the risk of lost writes in case of a crash.</description>
+	</property>
+
+	<property>
+		<name>hive.exec.drop.ignorenonexistent</name>
+		<value>true</value>
+		<description>drop table always works.</description>
+	</property>
+
+</configuration>
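
Alongside the stock Hive properties, the hive.hyracks.* and hive.algebricks.*
entries above are hivesterix-specific knobs. A hedged sketch of reading them
back with Hadoop's Configuration API; the class name, resource lookup, and
defaults here are illustrative assumptions, not code from this patch:

    import org.apache.hadoop.conf.Configuration;

    public class HivesterixConfDump {
        public static void main(String[] args) {
            Configuration conf = new Configuration(false);
            conf.addResource("hive-default.xml"); // resolved from the classpath
            String host = conf.get("hive.hyracks.host", "127.0.0.1");
            int port = conf.getInt("hive.hyracks.port", 13099);
            // Note: the key really is spelled "parrallelism" in the file above.
            int parallelism = conf.getInt("hive.hyracks.parrallelism", 2);
            boolean extGroupBy = conf.getBoolean("hive.algebricks.groupby.external", true);
            System.out.println(host + ":" + port + " parallelism=" + parallelism
                    + " externalGroupBy=" + extGroupBy);
        }
    }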
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/ignore.txt b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/ignore.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/ignore.txt
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/h11_share_scan.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/h11_share_scan.hive
new file mode 100644
index 0000000..a5c46c6
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/h11_share_scan.hive
@@ -0,0 +1,10 @@
+-- union case: both subqueries are map jobs on same input, followed by filesink
+DROP TABLE IF EXISTS src;
+
+CREATE TABLE src(key int, value int);
+CREATE TABLE src1(key int, value int);
+CREATE TABLE src2(key int);
+
+FROM src
+INSERT overwrite table src1 select * where key < 5
+INSERT overwrite table src2 select key where key > 10;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/h12_select_struct.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/h12_select_struct.hive
new file mode 100644
index 0000000..24ca265
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/h12_select_struct.hive
@@ -0,0 +1,6 @@
+-- struct case: project a struct-typed column from a table
+DROP TABLE IF EXISTS src;
+
+CREATE TABLE src(key int, value struct<v1:int, v2:int>);
+
+select value from src;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q10_returned_item.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q10_returned_item.hive
new file mode 100644
index 0000000..3f1214a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q10_returned_item.hive
@@ -0,0 +1,37 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q10_returned_item;
+
+-- create the tables and load the data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the result table
+create table q10_returned_item (c_custkey int, c_name string, revenue double, c_acctbal string, n_name string, c_address string, c_phone string, c_comment string);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+insert overwrite table q10_returned_item
+select 
+  c_custkey, c_name, sum(l_extendedprice * (1 - l_discount)) as revenue, 
+  c_acctbal, n_name, c_address, c_phone, c_comment
+from
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey and o.o_orderdate >= '1993-10-01' and o.o_orderdate < '1994-01-01'
+  join nation n 
+  on 
+    c.c_nationkey = n.n_nationkey
+  join lineitem l 
+  on 
+    l.l_orderkey = o.o_orderkey and l.l_returnflag = 'R'
+group by c_custkey, c_name, c_acctbal, c_phone, n_name, c_address, c_comment 
+order by revenue desc 
+limit 20;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q11_important_stock.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q11_important_stock.hive
new file mode 100644
index 0000000..8550b72
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q11_important_stock.hive
@@ -0,0 +1,47 @@
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q11_important_stock;
+DROP TABLE IF EXISTS q11_part_tmp;
+DROP TABLE IF EXISTS q11_sum_tmp;
+
+-- create tables and load data
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/partsupp';
+
+-- create the target table
+create table q11_important_stock(ps_partkey INT, value DOUBLE);
+create table q11_part_tmp(ps_partkey int, part_value double);
+create table q11_sum_tmp(total_value double);
+
+-- the query
+insert overwrite table q11_part_tmp
+select 
+  ps_partkey, sum(ps_supplycost * ps_availqty) as part_value 
+from
+  nation n join supplier s 
+  on 
+    s.s_nationkey = n.n_nationkey and n.n_name = 'GERMANY'
+  join partsupp ps 
+  on 
+    ps.ps_suppkey = s.s_suppkey
+group by ps_partkey;
+
+insert overwrite table q11_sum_tmp
+select 
+  sum(part_value) as total_value
+from 
+  q11_part_tmp;
+
+insert overwrite table q11_important_stock
+select 
+  ps_partkey, part_value as value
+from
+  (
+    select ps_partkey, part_value, total_value
+    from q11_part_tmp join q11_sum_tmp
+  ) a
+where part_value > total_value * 0.0001
+order by value desc;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q12_shipping.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q12_shipping.hive
new file mode 100644
index 0000000..062f7b9
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q12_shipping.hive
@@ -0,0 +1,42 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q12_shipping;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+
+-- create the result table
+create table q12_shipping(l_shipmode string, high_line_count double, low_line_count double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
+-- the query
+insert overwrite table q12_shipping
+select 
+  l_shipmode,
+  sum(case
+    when o_orderpriority ='1-URGENT'
+         or o_orderpriority ='2-HIGH'
+    then 1
+    else 0
+end
+  ) as high_line_count,
+  sum(case
+    when o_orderpriority <> '1-URGENT'
+         and o_orderpriority <> '2-HIGH'
+    then 1
+    else 0
+end
+  ) as low_line_count
+from
+  orders o join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey and l.l_commitdate < l.l_receiptdate
+    and l.l_shipdate < l.l_commitdate and l.l_receiptdate >= '1994-01-01'
+    and l.l_receiptdate < '1995-01-01'
+where 
+  l.l_shipmode = 'MAIL' or l.l_shipmode = 'SHIP'
+group by l_shipmode
+order by l_shipmode;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q13_customer_distribution.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q13_customer_distribution.hive
new file mode 100644
index 0000000..a799008
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q13_customer_distribution.hive
@@ -0,0 +1,27 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q13_customer_distribution;
+
+-- create the tables and load the data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+
+-- create the result table
+create table q13_customer_distribution (c_count int, custdist int);
+
+-- the query
+insert overwrite table q13_customer_distribution
+select 
+  c_count, count(1) as custdist
+from 
+  (select 
+     c_custkey, count(o_orderkey) as c_count
+   from 
+     customer c left outer join orders o 
+     on 
+       c.c_custkey = o.o_custkey and not o.o_comment like '%special%requests%'
+   group by c_custkey
+   ) c_orders
+group by c_count
+order by custdist desc, c_count desc;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q14_promotion_effect.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q14_promotion_effect.hive
new file mode 100644
index 0000000..988f400
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q14_promotion_effect.hive
@@ -0,0 +1,28 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q14_promotion_effect;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+
+-- create the result table
+create table q14_promotion_effect(promo_revenue double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
+insert overwrite table q14_promotion_effect
+select 
+  100.00 * sum(case
+               when p_type like 'PROMO%'
+               then l_extendedprice*(1-l_discount)
+               else 0.0
+               end
+  ) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue
+from 
+  part p join lineitem l 
+  on 
+    l.l_partkey = p.p_partkey and l.l_shipdate >= '1995-09-01' and l.l_shipdate < '1995-10-01';
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q15_top_supplier.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q15_top_supplier.hive
new file mode 100644
index 0000000..04064ed
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q15_top_supplier.hive
@@ -0,0 +1,45 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS revenue;
+DROP TABLE IF EXISTS max_revenue;
+DROP TABLE IF EXISTS q15_top_supplier;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+
+-- create result tables
+create table revenue(supplier_no int, total_revenue double); 
+create table max_revenue(max_revenue double); 
+create table q15_top_supplier(s_suppkey int, s_name string, s_address string, s_phone string, total_revenue double);
+
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table revenue
+select 
+  l_suppkey as supplier_no, sum(l_extendedprice * (1 - l_discount)) as total_revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1996-01-01' and l_shipdate < '1996-04-01'
+group by l_suppkey;
+
+insert overwrite table max_revenue
+select 
+  max(total_revenue)
+from 
+  revenue;
+
+insert overwrite table q15_top_supplier
+select 
+  s_suppkey, s_name, s_address, s_phone, total_revenue
+from supplier s join revenue r 
+  on 
+    s.s_suppkey = r.supplier_no
+  join max_revenue m 
+  on 
+    r.total_revenue = m.max_revenue
+order by s_suppkey;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q16_parts_supplier_relationship.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q16_parts_supplier_relationship.hive
new file mode 100644
index 0000000..971ef99
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q16_parts_supplier_relationship.hive
@@ -0,0 +1,53 @@
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS q16_parts_supplier_relationship;
+DROP TABLE IF EXISTS q16_tmp;
+DROP TABLE IF EXISTS supplier_tmp;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/partsupp';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+
+-- create the result table
+create table q16_parts_supplier_relationship(p_brand string, p_type string, p_size int, supplier_cnt int);
+create table q16_tmp(p_brand string, p_type string, p_size int, ps_suppkey int);
+create table supplier_tmp(s_suppkey int);
+
+-- the query
+insert overwrite table supplier_tmp
+select 
+  s_suppkey
+from 
+  supplier
+where 
+  not s_comment like '%Customer%Complaints%';
+
+insert overwrite table q16_tmp
+select 
+  p_brand, p_type, p_size, ps_suppkey
+from 
+  partsupp ps join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_brand <> 'Brand#45' 
+    and not p.p_type like 'MEDIUM POLISHED%'
+  join supplier_tmp s 
+  on 
+    ps.ps_suppkey = s.s_suppkey;
+
+insert overwrite table q16_parts_supplier_relationship
+select 
+  p_brand, p_type, p_size, count(distinct ps_suppkey) as supplier_cnt
+from 
+  (select 
+     * 
+   from
+     q16_tmp 
+   where p_size = 49 or p_size = 14 or p_size = 23 or
+         p_size = 45 or p_size = 19 or p_size = 3 or
+         p_size = 36 or p_size = 9
+) q16_all
+group by p_brand, p_type, p_size
+order by supplier_cnt desc, p_brand, p_type, p_size;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q17_small_quantity_order_revenue.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q17_small_quantity_order_revenue.hive
new file mode 100644
index 0000000..65291cd
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q17_small_quantity_order_revenue.hive
@@ -0,0 +1,38 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q17_small_quantity_order_revenue;
+DROP TABLE IF EXISTS lineitem_tmp;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+
+-- create the result table
+create table q17_small_quantity_order_revenue (avg_yearly double);
+create table lineitem_tmp (t_partkey int, t_avg_quantity double);
+
+-- the query
+insert overwrite table lineitem_tmp
+select 
+  l_partkey as t_partkey, 0.2 * avg(l_quantity) as t_avg_quantity
+from 
+  lineitem
+group by l_partkey;
+
+insert overwrite table q17_small_quantity_order_revenue
+select
+  sum(l_extendedprice) / 7.0 as avg_yearly
+from
+  (select l_quantity, l_extendedprice, t_avg_quantity from
+   lineitem_tmp t join
+     (select
+        l_quantity, l_partkey, l_extendedprice
+      from
+        part p join lineitem l
+        on
+          p.p_partkey = l.l_partkey
+          and p.p_brand = 'Brand#23'
+          and p.p_container = 'MED BOX'
+      ) l1 on l1.l_partkey = t.t_partkey
+   ) a
+where l_quantity < t_avg_quantity;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q18_large_volume_customer.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q18_large_volume_customer.hive
new file mode 100644
index 0000000..76d0475
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q18_large_volume_customer.hive
@@ -0,0 +1,43 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS q18_tmp;
+DROP TABLE IF EXISTS q18_large_volume_customer;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+
+-- create the result tables
+create table q18_tmp(l_orderkey int, t_sum_quantity double);
+create table q18_large_volume_customer(c_name string, c_custkey int, o_orderkey int, o_orderdate string, o_totalprice double, sum_quantity double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1164000000;
+
+-- the query
+insert overwrite table q18_tmp
+select 
+  l_orderkey, sum(l_quantity) as t_sum_quantity
+from 
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q18_large_volume_customer
+select 
+  c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice,sum(l_quantity)
+from 
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey
+  join q18_tmp t 
+  on 
+    o.o_orderkey = t.l_orderkey and t.t_sum_quantity > 300
+  join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey
+group by c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice
+order by o_totalprice desc,o_orderdate
+limit 100;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q19_discounted_revenue.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q19_discounted_revenue.hive
new file mode 100644
index 0000000..fd330cd
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q19_discounted_revenue.hive
@@ -0,0 +1,49 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q19_discounted_revenue;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+
+-- create the result table
+create table q19_discounted_revenue(revenue double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
+insert overwrite table q19_discounted_revenue
+select
+  sum(l_extendedprice * (1 - l_discount) ) as revenue
+from
+  	part p join lineitem l
+  on 
+    p.p_partkey = l.l_partkey    
+where
+  (
+    p_brand = 'Brand#12'
+	and p_container REGEXP 'SM CASE|SM BOX|SM PACK|SM PKG'
+	and l_quantity >= 1 and l_quantity <= 11
+	and p_size >= 1 and p_size <= 5
+	and l_shipmode REGEXP 'AIR|AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  )
+  or 
+  (
+    p_brand = 'Brand#23'
+	and p_container REGEXP 'MED BAG|MED BOX|MED PKG|MED PACK'
+	and l_quantity >= 10 and l_quantity <= 20
+	and p_size >= 1 and p_size <= 10
+	and l_shipmode REGEXP 'AIR|AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  )
+  or
+  (
+	p_brand = 'Brand#34'
+	and p_container REGEXP 'LG CASE|LG BOX|LG PACK|LG PKG'
+	and l_quantity >= 20 and l_quantity <= 30
+	and p_size >= 1 and p_size <= 15
+	and l_shipmode REGEXP 'AIR|AIR REG'
+	and l_shipinstruct = 'DELIVER IN PERSON'
+  );
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q1_pricing_summary_report.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q1_pricing_summary_report.hive
new file mode 100644
index 0000000..a002068
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q1_pricing_summary_report.hive
@@ -0,0 +1,21 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q1_pricing_summary_report;
+
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the target table
+CREATE TABLE q1_pricing_summary_report ( L_RETURNFLAG STRING, L_LINESTATUS STRING, SUM_QTY DOUBLE, SUM_BASE_PRICE DOUBLE, SUM_DISC_PRICE DOUBLE, SUM_CHARGE DOUBLE, AVE_QTY DOUBLE, AVE_PRICE DOUBLE, AVE_DISC DOUBLE, COUNT_ORDER INT);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+INSERT OVERWRITE TABLE q1_pricing_summary_report 
+SELECT 
+  L_RETURNFLAG, L_LINESTATUS, SUM(L_QUANTITY), SUM(L_EXTENDEDPRICE), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)*(1+L_TAX)), AVG(L_QUANTITY), AVG(L_EXTENDEDPRICE), AVG(L_DISCOUNT), COUNT(1) 
+FROM 
+  lineitem 
+WHERE 
+  L_SHIPDATE<='1998-09-02' 
+GROUP BY L_RETURNFLAG, L_LINESTATUS 
+ORDER BY L_RETURNFLAG, L_LINESTATUS;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q20_potential_part_promotion.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q20_potential_part_promotion.hive
new file mode 100644
index 0000000..63297e6
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q20_potential_part_promotion.hive
@@ -0,0 +1,79 @@
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q20_tmp1;
+DROP TABLE IF EXISTS q20_tmp2;
+DROP TABLE IF EXISTS q20_tmp3;
+DROP TABLE IF EXISTS q20_tmp4;
+DROP TABLE IF EXISTS q20_potential_part_promotion;
+
+-- create tables and load data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/partsupp';
+
+-- create the target tables
+create table q20_tmp1(p_partkey int);
+create table q20_tmp2(l_partkey int, l_suppkey int, sum_quantity double);
+create table q20_tmp3(ps_suppkey int, ps_availqty int, sum_quantity double);
+create table q20_tmp4(ps_suppkey int);
+create table q20_potential_part_promotion(s_name string, s_address string);
+
+set mapred.min.split.size=536870912;
+
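+-- The query is staged through temp tables: q20_tmp1 holds the 'forest%' parts,
+-- q20_tmp2 holds half of each (part, supplier)'s quantity shipped in 1994, q20_tmp3
+-- joins both onto partsupp, and q20_tmp4 keeps suppliers whose available quantity
+-- exceeds that threshold; the final step resolves them to Canadian supplier names.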
+-- the query
+insert overwrite table q20_tmp1
+select distinct p_partkey
+from
+  part 
+where 
+  p_name like 'forest%';
+
+insert overwrite table q20_tmp2
+select 
+  l_partkey, l_suppkey, 0.5 * sum(l_quantity)
+from
+  lineitem
+where
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+group by l_partkey, l_suppkey;
+
+insert overwrite table q20_tmp3
+select 
+  ps_suppkey, ps_availqty, sum_quantity
+from  
+  partsupp ps join q20_tmp1 t1 
+  on 
+    ps.ps_partkey = t1.p_partkey
+  join q20_tmp2 t2 
+  on 
+    ps.ps_partkey = t2.l_partkey and ps.ps_suppkey = t2.l_suppkey;
+
+insert overwrite table q20_tmp4
+select 
+  ps_suppkey
+from 
+  q20_tmp3
+where 
+  ps_availqty > sum_quantity
+group by ps_suppkey;
+
+insert overwrite table q20_potential_part_promotion
+select 
+  s_name, s_address
+from 
+  supplier s join nation n
+  on
+    s.s_nationkey = n.n_nationkey
+    and n.n_name = 'CANADA'
+  join q20_tmp4 t4
+  on 
+    s.s_suppkey = t4.ps_suppkey
+order by s_name;
+
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q21_suppliers_who_kept_orders_waiting.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q21_suppliers_who_kept_orders_waiting.hive
new file mode 100644
index 0000000..a467f60
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q21_suppliers_who_kept_orders_waiting.hive
@@ -0,0 +1,78 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q21_tmp1;
+DROP TABLE IF EXISTS q21_tmp2;
+DROP TABLE IF EXISTS q21_suppliers_who_kept_orders_waiting;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create target tables
+create table q21_tmp1(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_tmp2(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_suppliers_who_kept_orders_waiting(s_name string, numwait int);
+
+-- the query
+insert overwrite table q21_tmp1
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q21_tmp2
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+where
+  l_receiptdate > l_commitdate
+group by l_orderkey;
+
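+-- q21_tmp1 counts the distinct suppliers per order; q21_tmp2 does the same over late
+-- line items only (l_receiptdate > l_commitdate). The final query keeps 'F' orders of
+-- 'SAUDI ARABIA' suppliers on multi-supplier orders, then uses the right outer join
+-- against q21_tmp2 to require that no other supplier on the order was late.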
+insert overwrite table q21_suppliers_who_kept_orders_waiting
+select
+  s_name, count(1) as numwait
+from
+  (select s_name from
+(select s_name, t2.l_orderkey, l_suppkey, count_suppkey, max_suppkey 
+ from q21_tmp2 t2 right outer join
+      (select s_name, l_orderkey, l_suppkey from
+         (select s_name, t1.l_orderkey, l_suppkey, count_suppkey, max_suppkey
+          from
+            q21_tmp1 t1 join
+            (select s_name, l_orderkey, l_suppkey
+             from 
+               orders o join
+               (select s_name, l_orderkey, l_suppkey
+                from
+                  nation n join supplier s
+                  on
+                    s.s_nationkey = n.n_nationkey
+                    and n.n_name = 'SAUDI ARABIA'
+                  join lineitem l
+                  on
+                    s.s_suppkey = l.l_suppkey
+                where
+                  l.l_receiptdate > l.l_commitdate
+                ) l1 on o.o_orderkey = l1.l_orderkey and o.o_orderstatus = 'F'
+             ) l2 on l2.l_orderkey = t1.l_orderkey
+          ) a
+          where
+           (count_suppkey > 1) or ((count_suppkey=1) and (l_suppkey <> max_suppkey))
+       ) l3 on l3.l_orderkey = t2.l_orderkey
+    ) b
+    where
+     (count_suppkey is null) or ((count_suppkey=1) and (l_suppkey = max_suppkey))
+  ) c
+group by s_name
+order by numwait desc, s_name
+limit 100;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q22_global_sales_opportunity.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q22_global_sales_opportunity.hive
new file mode 100644
index 0000000..a7d6c72
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q22_global_sales_opportunity.hive
@@ -0,0 +1,73 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q22_customer_tmp;
+DROP TABLE IF EXISTS q22_customer_tmp1;
+DROP TABLE IF EXISTS q22_orders_tmp;
+DROP TABLE IF EXISTS q22_global_sales_opportunity;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+
+-- create target tables
+create table q22_customer_tmp(c_acctbal double, c_custkey int, cntrycode string);
+create table q22_customer_tmp1(avg_acctbal double);
+create table q22_orders_tmp(o_custkey int);
+create table q22_global_sales_opportunity(cntrycode string, numcust int, totacctbal double);
+
+-- the query
+insert overwrite table q22_customer_tmp
+select 
+  c_acctbal, c_custkey, substr(c_phone, 1, 2) as cntrycode
+from 
+  customer
+where 
+  substr(c_phone, 1, 2) = '13' or
+  substr(c_phone, 1, 2) = '31' or
+  substr(c_phone, 1, 2) = '23' or
+  substr(c_phone, 1, 2) = '29' or
+  substr(c_phone, 1, 2) = '30' or
+  substr(c_phone, 1, 2) = '18' or
+  substr(c_phone, 1, 2) = '17';
+ 
+insert overwrite table q22_customer_tmp1
+select
+  avg(c_acctbal)
+from
+  q22_customer_tmp
+where
+  c_acctbal > 0.00;
+
+insert overwrite table q22_orders_tmp
+select 
+  o_custkey 
+from 
+  orders
+group by 
+  o_custkey;
+
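+-- The final query is an anti-join: the right outer join against q22_orders_tmp plus
+-- the 'o_custkey is null' filter keeps customers without orders, while the join with
+-- the single-row q22_customer_tmp1 supplies the average balance to compare against.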
+insert overwrite table q22_global_sales_opportunity
+select
+  cntrycode, count(1) as numcust, sum(c_acctbal) as totacctbal
+from
+(
+  select cntrycode, c_acctbal, avg_acctbal from
+  q22_customer_tmp1 ct1 join
+  (
+    select cntrycode, c_acctbal from
+      q22_orders_tmp ot 
+      right outer join q22_customer_tmp ct 
+      on
+        ct.c_custkey = ot.o_custkey
+    where
+      o_custkey is null
+  ) ct2
+) a
+where
+  c_acctbal > avg_acctbal
+group by cntrycode
+order by cntrycode;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q2_minimum_cost_supplier.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q2_minimum_cost_supplier.hive
new file mode 100644
index 0000000..061c5e7
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q2_minimum_cost_supplier.hive
@@ -0,0 +1,59 @@
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier_tmp1;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier_tmp2;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/region';
+
+-- create result tables
+create table q2_minimum_cost_supplier_tmp1 (s_acctbal double, s_name string, n_name string, p_partkey int, ps_supplycost double, p_mfgr string, s_address string, s_phone string, s_comment string);
+create table q2_minimum_cost_supplier_tmp2 (p_partkey int, ps_min_supplycost double);
+create table q2_minimum_cost_supplier (s_acctbal double, s_name string, n_name string, p_partkey int, p_mfgr string, s_address string, s_phone string, s_comment string);
+
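+-- tmp1 collects European suppliers of size-15 '%BRASS' parts together with their
+-- supply cost, tmp2 derives the minimum supply cost per part, and the final join
+-- keeps only the suppliers matching that minimum.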
+-- the query
+insert overwrite table q2_minimum_cost_supplier_tmp1 
+select 
+  s.s_acctbal, s.s_name, n.n_name, p.p_partkey, ps.ps_supplycost, p.p_mfgr, s.s_address, s.s_phone, s.s_comment 
+from 
+  nation n join region r 
+  on 
+    n.n_regionkey = r.r_regionkey and r.r_name = 'EUROPE' 
+  join supplier s 
+  on 
+    s.s_nationkey = n.n_nationkey 
+  join partsupp ps 
+  on 
+    s.s_suppkey = ps.ps_suppkey 
+  join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_size = 15 and p.p_type like '%BRASS';
+
+insert overwrite table q2_minimum_cost_supplier_tmp2 
+select 
+  p_partkey, min(ps_supplycost) 
+from  
+  q2_minimum_cost_supplier_tmp1 
+group by p_partkey;
+
+insert overwrite table q2_minimum_cost_supplier 
+select 
+  t1.s_acctbal, t1.s_name, t1.n_name, t1.p_partkey, t1.p_mfgr, t1.s_address, t1.s_phone, t1.s_comment 
+from 
+  q2_minimum_cost_supplier_tmp1 t1 join q2_minimum_cost_supplier_tmp2 t2 
+on 
+  t1.p_partkey = t2.p_partkey and t1.ps_supplycost=t2.ps_min_supplycost 
+order by s_acctbal desc, n_name, s_name, p_partkey 
+limit 100;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q3_shipping_priority.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q3_shipping_priority.hive
new file mode 100644
index 0000000..0049eb3
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q3_shipping_priority.hive
@@ -0,0 +1,31 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS q3_shipping_priority;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+
+-- create the target table
+create table q3_shipping_priority (l_orderkey int, revenue double, o_orderdate string, o_shippriority int);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
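+-- Joins BUILDING-segment customers to their orders and line items (restricted to
+-- l_linenumber < 3 in this variant) and returns the ten highest-revenue rows.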
+-- the query
+insert overwrite table q3_shipping_priority 
+select 
+  l_orderkey, (l_extendedprice*(1-l_discount)) as revenue, o_orderdate, o_shippriority 
+from 
+  customer c join orders o 
+    on c.c_mktsegment = 'BUILDING' and c.c_custkey = o.o_custkey 
+  join lineitem l 
+    on l.l_orderkey = o.o_orderkey and l.l_linenumber<3
+-- group by l_orderkey, o_orderdate, o_shippriority 
+order by revenue desc
+limit 10;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q4_order_priority.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q4_order_priority.hive
new file mode 100644
index 0000000..aa828e9
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q4_order_priority.hive
@@ -0,0 +1,33 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q4_order_priority_tmp;
+DROP TABLE IF EXISTS q4_order_priority;
+
+-- create tables and load data
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the target tables
+CREATE TABLE q4_order_priority_tmp (O_ORDERKEY INT);
+CREATE TABLE q4_order_priority (O_ORDERPRIORITY STRING, ORDER_COUNT INT);
+
+set mapred.min.split.size=536870912;
+-- the query
+INSERT OVERWRITE TABLE q4_order_priority_tmp 
+select 
+  DISTINCT l_orderkey 
+from 
+  lineitem 
+where 
+  l_commitdate < l_receiptdate;
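+
+-- q4_order_priority_tmp holds the orderkeys that have at least one late line item;
+-- the main query counts such orders per priority for the third quarter of 1993.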
+INSERT OVERWRITE TABLE q4_order_priority 
+select o_orderpriority, count(1) as order_count 
+from 
+  orders o join q4_order_priority_tmp t 
+  on 
+    o.o_orderkey = t.o_orderkey and o.o_orderdate >= '1993-07-01' and o.o_orderdate < '1993-10-01'
+group by o_orderpriority 
+order by o_orderpriority;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q5_local_supplier_volume.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q5_local_supplier_volume.hive
new file mode 100644
index 0000000..a975ce1
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q5_local_supplier_volume.hive
@@ -0,0 +1,45 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS q5_local_supplier_volume;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/region';
+
+-- create the target table
+create table q5_local_supplier_volume (N_NAME STRING, REVENUE DOUBLE);
+
+set mapred.min.split.size=536870912;
+
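+-- The nested joins narrow region 'ASIA' to its nations, their suppliers, the 1994
+-- line items and orders, and finally the customers; matching c_nationkey against
+-- s_nationkey keeps only orders where customer and supplier share a nation.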
+-- the query
+insert overwrite table q5_local_supplier_volume 
+select 
+  n_name, sum(l_extendedprice * (1 - l_discount)) as revenue 
+from
+  customer c join
+    ( select n_name, l_extendedprice, l_discount, s_nationkey, o_custkey from orders o join
+      ( select n_name, l_extendedprice, l_discount, l_orderkey, s_nationkey from lineitem l join
+        ( select n_name, s_suppkey, s_nationkey from supplier s join
+          ( select n_name, n_nationkey 
+            from nation n join region r 
+            on n.n_regionkey = r.r_regionkey and r.r_name = 'ASIA'
+          ) n1 on s.s_nationkey = n1.n_nationkey
+        ) s1 on l.l_suppkey = s1.s_suppkey
+      ) l1 on l1.l_orderkey = o.o_orderkey and o.o_orderdate >= '1994-01-01' 
+              and o.o_orderdate < '1995-01-01'
+    ) o1 
+    on c.c_nationkey = o1.s_nationkey and c.c_custkey = o1.o_custkey
+group by n_name 
+order by revenue desc;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q6_forecast_revenue_change.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q6_forecast_revenue_change.hive
new file mode 100644
index 0000000..d8cb9b9
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q6_forecast_revenue_change.hive
@@ -0,0 +1,23 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q6_forecast_revenue_change;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the target table
+create table q6_forecast_revenue_change (revenue double);
+
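+-- Q6 is a single filter-and-aggregate pass: potential revenue from 1994 shipments
+-- with a discount between 5% and 7% and quantity under 24.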
+-- the query
+insert overwrite table q6_forecast_revenue_change 
+select 
+  sum(l_extendedprice*l_discount) as revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+  and l_discount >= 0.05 and l_discount <= 0.07
+  and l_quantity < 24;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q7_volume_shipping.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q7_volume_shipping.hive
new file mode 100644
index 0000000..3dfb22a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q7_volume_shipping.hive
@@ -0,0 +1,74 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q7_volume_shipping;
+DROP TABLE IF EXISTS q7_volume_shipping_tmp;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the target tables
+create table q7_volume_shipping (supp_nation string, cust_nation string, l_year int, revenue double);
+create table q7_volume_shipping_tmp(supp_nation string, cust_nation string, s_nationkey int, c_nationkey int);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
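+-- q7_volume_shipping_tmp enumerates both (supp_nation, cust_nation) orderings of
+-- FRANCE and GERMANY via UNION ALL; the main query then tags each 1995-1996 shipment
+-- with its nation pair and sums the volume per pair and year.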
+-- the query
+insert overwrite table q7_volume_shipping_tmp
+select 
+  * 
+from
+  (
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey,      
+      n2.n_nationkey as c_nationkey
+    from 
+      nation n1 join nation n2 
+      on 
+        n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY'
+    UNION ALL
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey, 
+      n2.n_nationkey as c_nationkey
+    from 
+      nation n1 join nation n2 
+      on 
+        n2.n_name = 'FRANCE' and n1.n_name = 'GERMANY'
+) a;
+
+insert overwrite table q7_volume_shipping 
+select 
+  supp_nation, cust_nation, l_year, sum(volume) as revenue
+from 
+  (
+    select
+      supp_nation, cust_nation, year(l_shipdate) as l_year, 
+      l_extendedprice * (1 - l_discount) as volume
+    from
+      q7_volume_shipping_tmp t join
+        (select l_shipdate, l_extendedprice, l_discount, c_nationkey, s_nationkey 
+         from supplier s join
+           (select l_shipdate, l_extendedprice, l_discount, l_suppkey, c_nationkey 
+            from customer c join
+              (select l_shipdate, l_extendedprice, l_discount, l_suppkey, o_custkey 
+               from orders o join lineitem l 
+               on 
+                 o.o_orderkey = l.l_orderkey and l.l_shipdate >= '1995-01-01' 
+                 and l.l_shipdate <= '1996-12-31'
+               ) l1 on c.c_custkey = l1.o_custkey
+            ) l2 on s.s_suppkey = l2.l_suppkey
+         ) l3 on l3.c_nationkey = t.c_nationkey and l3.s_nationkey = t.s_nationkey
+   ) shipping
+group by supp_nation, cust_nation, l_year
+order by supp_nation, cust_nation, l_year;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q8_national_market_share.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q8_national_market_share.hive
new file mode 100644
index 0000000..5e7baaa
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q8_national_market_share.hive
@@ -0,0 +1,59 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q8_national_market_share;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/region';
+
+-- create the result table
+create table q8_national_market_share(o_year string, mkt_share double);
+
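+-- mkt_share is BRAZIL's share of the total volume per order year, computed over
+-- 'ECONOMY ANODIZED STEEL' parts ordered in 1995-1996 by customers in region
+-- 'AMERICA'; the supplier's nation (n2) determines which volume counts as BRAZIL's.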
+-- the query
+insert overwrite table q8_national_market_share 
+select 
+  o_year, sum(case when nation = 'BRAZIL' then volume else 0.0 end) / sum(volume) as mkt_share
+from 
+  (
+    select 
+      year(o_orderdate) as o_year, l_extendedprice * (1-l_discount) as volume, 
+      n2.n_name as nation
+    from
+      nation n2 join
+        (select o_orderdate, l_discount, l_extendedprice, s_nationkey 
+         from supplier s join
+          (select o_orderdate, l_discount, l_extendedprice, l_suppkey 
+           from part p join
+             (select o_orderdate, l_partkey, l_discount, l_extendedprice, l_suppkey 
+              from 
+                (select o_orderdate, o_orderkey 
+                 from orders o join
+                   (select c.c_custkey 
+                    from customer c join
+                      (select n1.n_nationkey 
+                       from nation n1 join region r
+                       on n1.n_regionkey = r.r_regionkey and r.r_name = 'AMERICA'
+                       ) n11 on c.c_nationkey = n11.n_nationkey
+                    ) c1 on c1.c_custkey = o.o_custkey
+                 ) o1 join lineitem l on l.l_orderkey = o1.o_orderkey and o1.o_orderdate >= '1995-01-01' 
+                         and o1.o_orderdate < '1996-12-31'
+              ) l1 on p.p_partkey = l1.l_partkey and p.p_type = 'ECONOMY ANODIZED STEEL'
+           ) p1 on s.s_suppkey = p1.l_suppkey
+        ) s1 on s1.s_nationkey = n2.n_nationkey
+  ) all_nation
+group by o_year
+order by o_year;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q9_product_type_profit.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q9_product_type_profit.hive
new file mode 100644
index 0000000..586779c
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/q9_product_type_profit.hive
@@ -0,0 +1,54 @@
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q9_product_type_profit;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the result table
+create table q9_product_type_profit (nation string, o_year string, sum_profit double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
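+-- The profit of a line item is l_extendedprice * (1 - l_discount) - ps_supplycost *
+-- l_quantity, restricted to parts whose name contains 'green' and summed per
+-- supplier nation and order year.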
+-- the query
+insert overwrite table q9_product_type_profit
+select 
+  nation, o_year, sum(amount) as sum_profit
+from 
+  (
+    select 
+      n_name as nation, year(o_orderdate) as o_year, 
+      l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount
+    from
+      orders o join
+      (select l_extendedprice, l_discount, l_quantity, l_orderkey, n_name, ps_supplycost 
+       from part p join
+         (select l_extendedprice, l_discount, l_quantity, l_partkey, l_orderkey, 
+                 n_name, ps_supplycost 
+          from partsupp ps join
+            (select l_suppkey, l_extendedprice, l_discount, l_quantity, l_partkey, 
+                    l_orderkey, n_name 
+             from
+               (select s_suppkey, n_name 
+                from nation n join supplier s on n.n_nationkey = s.s_nationkey
+               ) s1 join lineitem l on s1.s_suppkey = l.l_suppkey
+            ) l1 on ps.ps_suppkey = l1.l_suppkey and ps.ps_partkey = l1.l_partkey
+         ) l2 on p.p_name like '%green%' and p.p_partkey = l2.l_partkey
+     ) l3 on o.o_orderkey = l3.l_orderkey
+  ) profit
+group by nation, o_year
+order by nation, o_year desc;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u10_nestedloop_join.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u10_nestedloop_join.hive
new file mode 100644
index 0000000..ce94ac6
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u10_nestedloop_join.hive
@@ -0,0 +1,21 @@
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS u10_nestedloop_join;
+
+-- create tables and load data
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the target table
+create table u10_nestedloop_join(supp_nation string, cust_nation string, s_nationkey int, c_nationkey int);
+
+-- the query
+insert overwrite table u10_nestedloop_join
+select 
+  * 
+from
+  (
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey,      
+      n2.n_nationkey as c_nationkey
+    from 
+      nation n1 join nation n2 where n1.n_nationkey > n2.n_nationkey
+) a;
\ No newline at end of file
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u1_group_by.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u1_group_by.hive
new file mode 100644
index 0000000..1d5c312
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u1_group_by.hive
@@ -0,0 +1,12 @@
+drop table IF EXISTS lineitem;
+
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, 
+L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, 
+L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, 
+L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS 
+TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+select * from (select sum(abs(L_QUANTITY)), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)*(1+L_TAX))
+FROM lineitem WHERE L_SHIPDATE<='1998-09-02'  GROUP BY L_RETURNFLAG) T;
+
+drop table lineitem;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u2_select-project.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u2_select-project.hive
new file mode 100644
index 0000000..1cf0c36
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u2_select-project.hive
@@ -0,0 +1,7 @@
+drop table IF EXISTS supplier;
+drop table IF EXISTS result;
+
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create table result (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT);
+
+select (2*s_suppkey), s_address, s_nationkey,  s_name   FROM supplier where S_SUPPKEY*2 < 20;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u3_union.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u3_union.hive
new file mode 100644
index 0000000..1c84ba8
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u3_union.hive
@@ -0,0 +1,7 @@
+drop table IF EXISTS supplier;
+drop table IF EXISTS result;
+
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create table result (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT);
+
+select * from (select (2*s_suppkey), s_address, s_nationkey,  s_name  FROM supplier where S_SUPPKEY*2 < 20 union all select (2*s_suppkey), s_address, s_nationkey,  s_name   FROM supplier where S_SUPPKEY*2 > 50) t;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u4_join.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u4_join.hive
new file mode 100644
index 0000000..c013fa6
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u4_join.hive
@@ -0,0 +1,14 @@
+drop table IF EXISTS supplier;
+drop table IF EXISTS result;
+
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, 
+S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED
+BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+
+create table result (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT);
+
+insert overwrite table result select s_suppkey, s_name, s_address, s_nationkey
+from supplier where S_SUPPKEY*2 < 20;
+
+select result.s_suppkey, supplier.s_phone, supplier.s_acctbal,
+supplier.s_comment from result join supplier on result.s_suppkey=supplier.s_suppkey;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u5_lateral_view.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u5_lateral_view.hive
new file mode 100644
index 0000000..2740bca
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u5_lateral_view.hive
@@ -0,0 +1,7 @@
+drop table IF EXISTS supplier;
+drop table IF EXISTS result;
+
+create table supplier (S_SUPPKEY array<int>, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING);
+create table result (S_SUPPKEY int);
+
+select s_name, s_address, col1 from supplier LATERAL VIEW explode(s_suppkey) suppadd as col1;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u6_limit.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u6_limit.hive
new file mode 100644
index 0000000..b268aff
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u6_limit.hive
@@ -0,0 +1,8 @@
+drop table IF EXISTS orders;
+drop table IF EXISTS result;
+drop table IF EXISTS q_limit2;
+
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create table q_limit2(col1 int, col2 double, col3 string, col4 string);
+
+insert overwrite table q_limit2 select O_ORDERKEY, O_TOTALPRICE, O_ORDERDATE, O_CLERK from orders where O_TOTALPRICE<10000 order by o_totalprice limit 4;
\ No newline at end of file
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u7_multi_join.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u7_multi_join.hive
new file mode 100644
index 0000000..2891c56
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u7_multi_join.hive
@@ -0,0 +1,9 @@
+drop table IF EXISTS lineitem;
+drop table IF EXISTS orders;
+drop table IF EXISTS customer;
+
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+
+select l_linenumber, o_orderkey, o_totalprice, o_orderdate, o_shippriority from  customer c join orders o  on c.c_custkey = o.o_custkey join lineitem l    on o.o_orderkey = l.l_orderkey  where c.c_custkey<5 and o.o_totalprice<30000;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u8_non_mapred.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u8_non_mapred.hive
new file mode 100644
index 0000000..247f2c1
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u8_non_mapred.hive
@@ -0,0 +1,7 @@
+drop table IF EXISTS supplier;
+drop table IF EXISTS result;
+
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create table result (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT);
+
+select * FROM supplier;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u9_order_by.hive b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u9_order_by.hive
new file mode 100644
index 0000000..8d5d1cf
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/queries/u9_order_by.hive
@@ -0,0 +1,7 @@
+drop table IF EXISTS supplier;
+drop table IF EXISTS result;
+
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create table result (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT);
+
+insert overwrite table result select s_suppkey, s_name, s_address, s_nationkey  FROM supplier where S_SUPPKEY*2 < 20 order by s_name;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/h11_share_scan.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/h11_share_scan.plan
new file mode 100644
index 0000000..867bfaf
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/h11_share_scan.plan
@@ -0,0 +1,34 @@
+write [%0->$$1, %0->$$2]
+-- SINK_WRITE  |PARTITIONED|
+  select (function-call: algebricks:lt, Args:[%0->$$1, 5])
+  -- STREAM_SELECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      replicate 
+      -- SPLIT  |PARTITIONED|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          data-scan []<-[$$1, $$2] <- default.src
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$1]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$1])
+  -- STREAM_PROJECT  |PARTITIONED|
+    select (function-call: algebricks:gt, Args:[%0->$$1, 10])
+    -- STREAM_SELECT  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        replicate 
+        -- SPLIT  |PARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            data-scan []<-[$$1, $$2] <- default.src
+            -- DATASOURCE_SCAN  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                empty-tuple-source
+                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/h12_select_struct.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/h12_select_struct.plan
new file mode 100644
index 0000000..8bbfb61
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/h12_select_struct.plan
@@ -0,0 +1,10 @@
+write [%0->$$2]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    data-scan [$$2]<-[$$1, $$2] <- default.src
+    -- DATASOURCE_SCAN  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        empty-tuple-source
+        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q10_returned_item.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q10_returned_item.plan
new file mode 100644
index 0000000..05b3718
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q10_returned_item.plan
@@ -0,0 +1,102 @@
+write [%0->$$38, %0->$$39, %0->$$45, %0->$$40, %0->$$42, %0->$$43, %0->$$41, %0->$$44]
+-- SINK_WRITE  |UNPARTITIONED|
+  project ([$$38, $$39, $$45, $$40, $$42, $$43, $$41, $$44])
+  -- STREAM_PROJECT  |PARTITIONED|
+    limit 20
+    -- STREAM_LIMIT  |UNPARTITIONED|
+      limit 20
+      -- STREAM_LIMIT  |UNPARTITIONED|
+        exchange 
+        -- SORT_MERGE_EXCHANGE [$$45(DESC) ]  |PARTITIONED|
+          limit 20
+          -- STREAM_LIMIT  |LOCAL|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+              order (DESC, %0->$$45) 
+              -- STABLE_SORT [$$45(DESC)]  |LOCAL|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  group by ([$$38 := %0->$$48; $$39 := %0->$$49; $$40 := %0->$$50; $$41 := %0->$$51; $$42 := %0->$$52; $$43 := %0->$$53; $$44 := %0->$$54]) decor ([]) {
+                            aggregate [$$45] <- [function-call: hive:sum(FINAL), Args:[%0->$$47]]
+                            -- AGGREGATE  |LOCAL|
+                              nested tuple source
+                              -- NESTED_TUPLE_SOURCE  |LOCAL|
+                         }
+                  -- EXTERNAL_GROUP_BY[$$48, $$49, $$50, $$51, $$52, $$53, $$54]  |PARTITIONED|
+                    exchange 
+                    -- HASH_PARTITION_EXCHANGE [$$48, $$49, $$50, $$51, $$52, $$53, $$54]  |PARTITIONED|
+                      group by ([$$48 := %0->$$21; $$49 := %0->$$22; $$50 := %0->$$26; $$51 := %0->$$25; $$52 := %0->$$18; $$53 := %0->$$23; $$54 := %0->$$28]) decor ([]) {
+                                aggregate [$$47] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$6, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$7]]]]
+                                -- AGGREGATE  |LOCAL|
+                                  nested tuple source
+                                  -- NESTED_TUPLE_SOURCE  |LOCAL|
+                             }
+                      -- EXTERNAL_GROUP_BY[$$21, $$22, $$26, $$25, $$18, $$23, $$28]  |LOCAL|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$21, $$22, $$23, $$25, $$26, $$28, $$18, $$6, $$7])
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$29])
+                              -- HYBRID_HASH_JOIN [$$1][$$29]  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                                  project ([$$1, $$6, $$7])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    select (function-call: algebricks:eq, Args:[%0->$$9, R])
+                                    -- STREAM_SELECT  |PARTITIONED|
+                                      exchange 
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        data-scan [$$1, $$6, $$7, $$9]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            empty-tuple-source
+                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$29]  |PARTITIONED|
+                                  project ([$$29, $$21, $$22, $$23, $$25, $$26, $$28, $$18])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      join (function-call: algebricks:eq, Args:[%0->$$17, %0->$$24])
+                                      -- HYBRID_HASH_JOIN [$$17][$$24]  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$17]  |PARTITIONED|
+                                          data-scan [$$17, $$18]<-[$$17, $$18, $$19, $$20] <- default.nation
+                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              empty-tuple-source
+                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$24]  |PARTITIONED|
+                                          project ([$$24, $$21, $$22, $$23, $$25, $$26, $$28, $$29])
+                                          -- STREAM_PROJECT  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              join (function-call: algebricks:eq, Args:[%0->$$30, %0->$$21])
+                                              -- HYBRID_HASH_JOIN [$$30][$$21]  |PARTITIONED|
+                                                exchange 
+                                                -- HASH_PARTITION_EXCHANGE [$$30]  |PARTITIONED|
+                                                  project ([$$30, $$29])
+                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                    select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$33, 1994-01-01], function-call: algebricks:ge, Args:[%0->$$33, 1993-10-01], function-call: algebricks:lt, Args:[%0->$$33, 1994-01-01]])
+                                                    -- STREAM_SELECT  |PARTITIONED|
+                                                      exchange 
+                                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                        data-scan [$$33, $$29, $$30]<-[$$29, $$30, $$31, $$32, $$33, $$34, $$35, $$36, $$37] <- default.orders
+                                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                                          exchange 
+                                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                            empty-tuple-source
+                                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                exchange 
+                                                -- HASH_PARTITION_EXCHANGE [$$21]  |PARTITIONED|
+                                                  data-scan [$$21, $$22, $$23, $$24, $$25, $$26, $$28]<-[$$21, $$22, $$23, $$24, $$25, $$26, $$27, $$28] <- default.customer
+                                                  -- DATASOURCE_SCAN  |PARTITIONED|
+                                                    exchange 
+                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                      empty-tuple-source
+                                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q11_important_stock.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q11_important_stock.plan
new file mode 100644
index 0000000..70ad7ee
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q11_important_stock.plan
@@ -0,0 +1,126 @@
+write [%0->$$17, %0->$$18]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$17, $$18])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      group by ([$$17 := %0->$$21]) decor ([]) {
+                aggregate [$$18] <- [function-call: hive:sum(FINAL), Args:[%0->$$20]]
+                -- AGGREGATE  |LOCAL|
+                  nested tuple source
+                  -- NESTED_TUPLE_SOURCE  |LOCAL|
+             }
+      -- EXTERNAL_GROUP_BY[$$21]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$21]  |PARTITIONED|
+          group by ([$$21 := %0->$$1]) decor ([]) {
+                    aggregate [$$20] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$4, %0->$$3]]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- EXTERNAL_GROUP_BY[$$1]  |LOCAL|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              project ([$$1, $$3, $$4])
+              -- STREAM_PROJECT  |PARTITIONED|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  join (function-call: algebricks:eq, Args:[%0->$$2, %0->$$6])
+                  -- HYBRID_HASH_JOIN [$$2][$$6]  |PARTITIONED|
+                    exchange 
+                    -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+                      data-scan [$$2, $$1, $$3, $$4]<-[$$1, $$2, $$3, $$4, $$5] <- default.partsupp
+                      -- DATASOURCE_SCAN  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          empty-tuple-source
+                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                    exchange 
+                    -- HASH_PARTITION_EXCHANGE [$$6]  |PARTITIONED|
+                      project ([$$6])
+                      -- STREAM_PROJECT  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          join (function-call: algebricks:eq, Args:[%0->$$9, %0->$$13])
+                          -- HYBRID_HASH_JOIN [$$9][$$13]  |PARTITIONED|
+                            exchange 
+                            -- HASH_PARTITION_EXCHANGE [$$9]  |PARTITIONED|
+                              data-scan [$$9, $$6]<-[$$6, $$7, $$8, $$9, $$10, $$11, $$12] <- default.supplier
+                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  empty-tuple-source
+                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                            exchange 
+                            -- HASH_PARTITION_EXCHANGE [$$13]  |PARTITIONED|
+                              project ([$$13])
+                              -- STREAM_PROJECT  |PARTITIONED|
+                                select (function-call: algebricks:eq, Args:[%0->$$14, GERMANY])
+                                -- STREAM_SELECT  |PARTITIONED|
+                                  exchange 
+                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                    data-scan [$$13, $$14]<-[$$13, $$14, $$15, $$16] <- default.nation
+                                    -- DATASOURCE_SCAN  |PARTITIONED|
+                                      exchange 
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        empty-tuple-source
+                                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$3]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    group by ([]) decor ([]) {
+              aggregate [$$3] <- [function-call: hive:sum(FINAL), Args:[%0->$$5]]
+              -- AGGREGATE  |LOCAL|
+                nested tuple source
+                -- NESTED_TUPLE_SOURCE  |LOCAL|
+           }
+    -- EXTERNAL_GROUP_BY[]  |PARTITIONED|
+      exchange 
+      -- HASH_PARTITION_EXCHANGE []  |PARTITIONED|
+        group by ([]) decor ([]) {
+                  aggregate [$$5] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$2]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            data-scan [$$2]<-[$$1, $$2] <- default.q11_part_tmp
+            -- DATASOURCE_SCAN  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                empty-tuple-source
+                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$2, %0->$$3]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- SORT_MERGE_EXCHANGE [$$3(DESC) ]  |PARTITIONED|
+    order (DESC, %0->$$3) 
+    -- STABLE_SORT [$$3(DESC)]  |LOCAL|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        project ([$$2, $$3])
+        -- STREAM_PROJECT  |PARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            join (function-call: algebricks:and, Args:[function-call: algebricks:gt, Args:[%0->$$3, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$1, 1.0E-4]], true])
+            -- NESTED_LOOP  |PARTITIONED|
+              exchange 
+              -- BROADCAST_EXCHANGE  |PARTITIONED|
+                data-scan []<-[$$1] <- default.q11_sum_tmp
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                data-scan [$$2, $$3]<-[$$2, $$3] <- default.q11_part_tmp
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q12_shipping.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q12_shipping.plan
new file mode 100644
index 0000000..5c240e2
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q12_shipping.plan
@@ -0,0 +1,58 @@
+write [%0->$$26, %0->$$29, %0->$$30]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$26, $$29, $$30])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$29, $$30] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToDouble, Args:[%0->$$27], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToDouble, Args:[%0->$$28]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$26(ASC) ]  |PARTITIONED|
+        order (ASC, %0->$$26) 
+        -- STABLE_SORT [$$26(ASC)]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            group by ([$$26 := %0->$$34]) decor ([]) {
+                      aggregate [$$27, $$28] <- [function-call: hive:sum(FINAL), Args:[%0->$$32], function-call: hive:sum(FINAL), Args:[%0->$$33]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$34]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$34]  |PARTITIONED|
+                group by ([$$34 := %0->$$24]) decor ([]) {
+                          aggregate [$$32, $$33] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen, Args:[function-call: algebricks:or, Args:[function-call: algebricks:eq, Args:[%0->$$6, 1-URGENT], function-call: algebricks:eq, Args:[%0->$$6, 2-HIGH]], 1, 0]], function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen, Args:[function-call: algebricks:and, Args:[function-call: algebricks:neq, Args:[%0->$$6, 1-URGENT], function-call: algebricks:neq, Args:[%0->$$6, 2-HIGH]], 1, 0]]]
+                          -- AGGREGATE  |LOCAL|
+                            nested tuple source
+                            -- NESTED_TUPLE_SOURCE  |LOCAL|
+                       }
+                -- EXTERNAL_GROUP_BY[$$24]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    project ([$$6, $$24])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        join (function-call: algebricks:eq, Args:[%0->$$10, %0->$$1])
+                        -- HYBRID_HASH_JOIN [$$10][$$1]  |PARTITIONED|
+                          exchange 
+                          -- HASH_PARTITION_EXCHANGE [$$10]  |PARTITIONED|
+                            project ([$$10, $$24])
+                            -- STREAM_PROJECT  |PARTITIONED|
+                              select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$22, 1995-01-01], function-call: algebricks:ge, Args:[%0->$$22, 1994-01-01], function-call: algebricks:lt, Args:[%0->$$20, %0->$$21], function-call: algebricks:lt, Args:[%0->$$21, %0->$$22], function-call: algebricks:lt, Args:[%0->$$20, %0->$$21], function-call: algebricks:ge, Args:[%0->$$22, 1994-01-01], function-call: algebricks:lt, Args:[%0->$$22, 1995-01-01], function-call: algebricks:or, Args:[function-call: algebricks:eq, Args:[%0->$$24, MAIL], function-call: algebricks:eq, Args:[%0->$$24, SHIP]], function-call: algebricks:or, Args:[function-call: algebricks:eq, Args:[%0->$$24, MAIL], function-call: algebricks:eq, Args:[%0->$$24, SHIP]]])
+                              -- STREAM_SELECT  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  data-scan [$$21, $$20, $$22, $$24, $$10]<-[$$10, $$11, $$12, $$13, $$14, $$15, $$16, $$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24, $$25] <- default.lineitem
+                                  -- DATASOURCE_SCAN  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      empty-tuple-source
+                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                          exchange 
+                          -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                            data-scan [$$1, $$6]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.orders
+                            -- DATASOURCE_SCAN  |PARTITIONED|
+                              exchange 
+                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                empty-tuple-source
+                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q13_customer_distribution.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q13_customer_distribution.plan
new file mode 100644
index 0000000..19bcd24
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q13_customer_distribution.plan
@@ -0,0 +1,80 @@
+write [%0->$$22, %0->$$23]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$22, $$23])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$22, $$23] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$20], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$21]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$21(DESC), $$20(DESC) ]  |PARTITIONED|
+        order (DESC, %0->$$21) (DESC, %0->$$20) 
+        -- STABLE_SORT [$$21(DESC), $$20(DESC)]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            group by ([$$20 := %0->$$28]) decor ([]) {
+                      aggregate [$$21] <- [function-call: hive:count(FINAL), Args:[%0->$$27]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$28]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$28]  |PARTITIONED|
+                group by ([$$28 := %0->$$19]) decor ([]) {
+                          aggregate [$$27] <- [function-call: hive:count(PARTIAL1), Args:[1]]
+                          -- AGGREGATE  |LOCAL|
+                            nested tuple source
+                            -- NESTED_TUPLE_SOURCE  |LOCAL|
+                       }
+                -- EXTERNAL_GROUP_BY[$$19]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    project ([$$19])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        group by ([$$18 := %0->$$26]) decor ([]) {
+                                  aggregate [$$19] <- [function-call: hive:count(FINAL), Args:[%0->$$25]]
+                                  -- AGGREGATE  |LOCAL|
+                                    nested tuple source
+                                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+                               }
+                        -- EXTERNAL_GROUP_BY[$$26]  |PARTITIONED|
+                          exchange 
+                          -- HASH_PARTITION_EXCHANGE [$$26]  |PARTITIONED|
+                            group by ([$$26 := %0->$$10]) decor ([]) {
+                                      aggregate [$$25] <- [function-call: hive:count(PARTIAL1), Args:[%0->$$1]]
+                                      -- AGGREGATE  |LOCAL|
+                                        nested tuple source
+                                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                                   }
+                            -- EXTERNAL_GROUP_BY[$$10]  |LOCAL|
+                              exchange 
+                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                project ([$$10, $$1])
+                                -- STREAM_PROJECT  |PARTITIONED|
+                                  exchange 
+                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                    left outer join (function-call: algebricks:eq, Args:[%0->$$2, %0->$$10])
+                                    -- HYBRID_HASH_JOIN [$$10][$$2]  |PARTITIONED|
+                                      exchange 
+                                      -- HASH_PARTITION_EXCHANGE [$$10]  |PARTITIONED|
+                                        data-scan [$$10]<-[$$10, $$11, $$12, $$13, $$14, $$15, $$16, $$17] <- default.customer
+                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            empty-tuple-source
+                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                      exchange 
+                                      -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+                                        project ([$$2, $$1])
+                                        -- STREAM_PROJECT  |PARTITIONED|
+                                          select (function-call: algebricks:not, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$9, %special%requests%]])
+                                          -- STREAM_SELECT  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              data-scan [$$1, $$2, $$9]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.orders
+                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                exchange 
+                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                  empty-tuple-source
+                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q14_promotion_effect.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q14_promotion_effect.plan
new file mode 100644
index 0000000..21b90bd
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q14_promotion_effect.plan
@@ -0,0 +1,54 @@
+write [%0->$$28]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$28])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$28] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPDivide, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[100.0, %0->$$26], %0->$$27]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        group by ([]) decor ([]) {
+                  aggregate [$$26, $$27] <- [function-call: hive:sum(FINAL), Args:[%0->$$30], function-call: hive:sum(FINAL), Args:[%0->$$31]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[]  |PARTITIONED|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE []  |PARTITIONED|
+            group by ([]) decor ([]) {
+                      aggregate [$$30, $$31] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$5, PROMO%], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$15, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$16]], 0.0]], function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$15, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$16]]]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[]  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                project ([$$5, $$15, $$16])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    join (function-call: algebricks:eq, Args:[%0->$$11, %0->$$1])
+                    -- HYBRID_HASH_JOIN [$$11][$$1]  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$11]  |PARTITIONED|
+                        project ([$$11, $$15, $$16])
+                        -- STREAM_PROJECT  |PARTITIONED|
+                          select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$20, 1995-10-01], function-call: algebricks:ge, Args:[%0->$$20, 1995-09-01], function-call: algebricks:lt, Args:[%0->$$20, 1995-10-01]])
+                          -- STREAM_SELECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              data-scan [$$16, $$20, $$11, $$15]<-[$$10, $$11, $$12, $$13, $$14, $$15, $$16, $$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24, $$25] <- default.lineitem
+                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  empty-tuple-source
+                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                        data-scan [$$1, $$5]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.part
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q15_top_supplier.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q15_top_supplier.plan
new file mode 100644
index 0000000..a5bd27a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q15_top_supplier.plan
@@ -0,0 +1,110 @@
+write [%0->$$17, %0->$$18]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$17, $$18])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      group by ([$$17 := %0->$$21]) decor ([]) {
+                aggregate [$$18] <- [function-call: hive:sum(FINAL), Args:[%0->$$20]]
+                -- AGGREGATE  |LOCAL|
+                  nested tuple source
+                  -- NESTED_TUPLE_SOURCE  |LOCAL|
+             }
+      -- EXTERNAL_GROUP_BY[$$21]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$21]  |PARTITIONED|
+          group by ([$$21 := %0->$$3]) decor ([]) {
+                    aggregate [$$20] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$6, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$7]]]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- EXTERNAL_GROUP_BY[$$3]  |LOCAL|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              project ([$$3, $$6, $$7])
+              -- STREAM_PROJECT  |PARTITIONED|
+                select (function-call: algebricks:and, Args:[function-call: algebricks:ge, Args:[%0->$$11, 1996-01-01], function-call: algebricks:lt, Args:[%0->$$11, 1996-04-01], function-call: algebricks:ge, Args:[%0->$$11, 1996-01-01], function-call: algebricks:lt, Args:[%0->$$11, 1996-04-01]])
+                -- STREAM_SELECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    data-scan [$$3, $$6, $$7, $$11]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                    -- DATASOURCE_SCAN  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        empty-tuple-source
+                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$3]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    group by ([]) decor ([]) {
+              aggregate [$$3] <- [function-call: hive:max(FINAL), Args:[%0->$$5]]
+              -- AGGREGATE  |LOCAL|
+                nested tuple source
+                -- NESTED_TUPLE_SOURCE  |LOCAL|
+           }
+    -- PRE_CLUSTERED_GROUP_BY[]  |PARTITIONED|
+      exchange 
+      -- HASH_PARTITION_EXCHANGE []  |PARTITIONED|
+        group by ([]) decor ([]) {
+                  aggregate [$$5] <- [function-call: hive:max(PARTIAL1), Args:[%0->$$2]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- PRE_CLUSTERED_GROUP_BY[]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            data-scan [$$2]<-[$$1, $$2] <- default.revenue
+            -- DATASOURCE_SCAN  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                empty-tuple-source
+                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$2, %0->$$3, %0->$$4, %0->$$6, %0->$$10]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- SORT_MERGE_EXCHANGE [$$2(ASC) ]  |PARTITIONED|
+    order (ASC, %0->$$2) 
+    -- STABLE_SORT [$$2(ASC)]  |LOCAL|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        project ([$$2, $$3, $$4, $$6, $$10])
+        -- STREAM_PROJECT  |PARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$10])
+            -- HYBRID_HASH_JOIN [$$1][$$10]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                data-scan []<-[$$1] <- default.max_revenue
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$10]  |PARTITIONED|
+                project ([$$10, $$2, $$3, $$4, $$6])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    join (function-call: algebricks:eq, Args:[%0->$$9, %0->$$2])
+                    -- HYBRID_HASH_JOIN [$$9][$$2]  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$9]  |PARTITIONED|
+                        data-scan []<-[$$9, $$10] <- default.revenue
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+                        data-scan [$$2, $$3, $$4, $$6]<-[$$2, $$3, $$4, $$5, $$6, $$7, $$8] <- default.supplier
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q16_parts_supplier_relationship.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q16_parts_supplier_relationship.plan
new file mode 100644
index 0000000..9835346
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q16_parts_supplier_relationship.plan
@@ -0,0 +1,98 @@
+write [%0->$$1]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$1])
+  -- STREAM_PROJECT  |PARTITIONED|
+    select (function-call: algebricks:not, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$7, %Customer%Complaints%]])
+    -- STREAM_SELECT  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        data-scan [$$1, $$7]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7] <- default.supplier
+        -- DATASOURCE_SCAN  |PARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            empty-tuple-source
+            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$5, %0->$$6, %0->$$7, %0->$$12]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$5, $$6, $$7, $$12])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$12])
+      -- HYBRID_HASH_JOIN [$$1][$$12]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+          data-scan []<-[$$1] <- default.supplier_tmp
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$12]  |PARTITIONED|
+          project ([$$12, $$5, $$6, $$7])
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              join (function-call: algebricks:eq, Args:[%0->$$2, %0->$$11])
+              -- HYBRID_HASH_JOIN [$$2][$$11]  |PARTITIONED|
+                exchange 
+                -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+                  select (function-call: algebricks:and, Args:[function-call: algebricks:not, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$6, MEDIUM POLISHED%]], function-call: algebricks:neq, Args:[%0->$$5, Brand#45], function-call: algebricks:not, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$6, MEDIUM POLISHED%]]])
+                  -- STREAM_SELECT  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      data-scan [$$2, $$5, $$6, $$7]<-[$$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10] <- default.part
+                      -- DATASOURCE_SCAN  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          empty-tuple-source
+                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                exchange 
+                -- HASH_PARTITION_EXCHANGE [$$11]  |PARTITIONED|
+                  data-scan [$$11, $$12]<-[$$11, $$12, $$13, $$14, $$15] <- default.partsupp
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$10, %0->$$11, %0->$$12, %0->$$14]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$10, $$11, $$12, $$14])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$14] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$13]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$13(DESC), $$10(ASC), $$11(ASC), $$12(ASC) ]  |PARTITIONED|
+        order (DESC, %0->$$13) (ASC, %0->$$10) (ASC, %0->$$11) (ASC, %0->$$12) 
+        -- STABLE_SORT [$$13(DESC), $$10(ASC), $$11(ASC), $$12(ASC)]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            group by ([$$10 := %0->$$5; $$11 := %0->$$6; $$12 := %0->$$7]) decor ([]) {
+                      aggregate [$$13] <- [function-call: hive:count(COMPLETE), Args:[%0->$$8]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$5, $$6, $$7]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$5, $$6, $$7]  |PARTITIONED|
+                group by ([$$5 := %0->$$1; $$6 := %0->$$2; $$7 := %0->$$3; $$8 := %0->$$4]) decor ([]) {
+                          aggregate [] <- []
+                          -- AGGREGATE  |LOCAL|
+                            nested tuple source
+                            -- NESTED_TUPLE_SOURCE  |LOCAL|
+                       }
+                -- EXTERNAL_GROUP_BY[$$1, $$2, $$3, $$4]  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    select (function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:eq, Args:[%0->$$3, 49], function-call: algebricks:eq, Args:[%0->$$3, 14]], function-call: algebricks:eq, Args:[%0->$$3, 23]], function-call: algebricks:eq, Args:[%0->$$3, 45]], function-call: algebricks:eq, Args:[%0->$$3, 19]], function-call: algebricks:eq, Args:[%0->$$3, 3]], function-call: algebricks:eq, Args:[%0->$$3, 36]], function-call: algebricks:eq, Args:[%0->$$3, 9]])
+                    -- STREAM_SELECT  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        data-scan []<-[$$1, $$2, $$3, $$4] <- default.q16_tmp
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q17_small_quantity_order_revenue.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q17_small_quantity_order_revenue.plan
new file mode 100644
index 0000000..a827007
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q17_small_quantity_order_revenue.plan
@@ -0,0 +1,104 @@
+write [%0->$$17, %0->$$19]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$17, $$19])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$19] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[0.2, %0->$$18]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        group by ([$$17 := %0->$$22]) decor ([]) {
+                  aggregate [$$18] <- [function-call: hive:avg(FINAL), Args:[%0->$$21]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[$$22]  |PARTITIONED|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE [$$22]  |PARTITIONED|
+            group by ([$$22 := %0->$$2]) decor ([]) {
+                      aggregate [$$21] <- [function-call: hive:avg(PARTIAL1), Args:[%0->$$5]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$2]  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                data-scan [$$2, $$5]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$29]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$29])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$29] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPDivide, Args:[%0->$$28, 7.0]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        group by ([]) decor ([]) {
+                  aggregate [$$28] <- [function-call: hive:sum(FINAL), Args:[%0->$$31]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[]  |PARTITIONED|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE []  |PARTITIONED|
+            group by ([]) decor ([]) {
+                      aggregate [$$31] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$17]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[]  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                project ([$$17])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  select (function-call: algebricks:lt, Args:[%0->$$16, %0->$$2])
+                  -- STREAM_SELECT  |UNPARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      join (function-call: algebricks:eq, Args:[%0->$$13, %0->$$1])
+                      -- HYBRID_HASH_JOIN [$$13][$$1]  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$13, $$16, $$17])
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              join (function-call: algebricks:eq, Args:[%0->$$13, %0->$$3])
+                              -- HYBRID_HASH_JOIN [$$13][$$3]  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$13]  |PARTITIONED|
+                                  data-scan [$$13, $$16, $$17]<-[$$12, $$13, $$14, $$15, $$16, $$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24, $$25, $$26, $$27] <- default.lineitem
+                                  -- DATASOURCE_SCAN  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      empty-tuple-source
+                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$3]  |PARTITIONED|
+                                  project ([$$3])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    select (function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$9, MED BOX], function-call: algebricks:eq, Args:[%0->$$6, Brand#23], function-call: algebricks:eq, Args:[%0->$$9, MED BOX]])
+                                    -- STREAM_SELECT  |PARTITIONED|
+                                      exchange 
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        data-scan [$$3, $$6, $$9]<-[$$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11] <- default.part
+                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            empty-tuple-source
+                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                        exchange 
+                        -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                          data-scan [$$1, $$2]<-[$$1, $$2] <- default.lineitem_tmp
+                          -- DATASOURCE_SCAN  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              empty-tuple-source
+                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q18_large_volume_customer.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q18_large_volume_customer.plan
new file mode 100644
index 0000000..ea47ea0
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q18_large_volume_customer.plan
@@ -0,0 +1,126 @@
+write [%0->$$17, %0->$$18]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$17, $$18])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      group by ([$$17 := %0->$$21]) decor ([]) {
+                aggregate [$$18] <- [function-call: hive:sum(FINAL), Args:[%0->$$20]]
+                -- AGGREGATE  |LOCAL|
+                  nested tuple source
+                  -- NESTED_TUPLE_SOURCE  |LOCAL|
+             }
+      -- EXTERNAL_GROUP_BY[$$21]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$21]  |PARTITIONED|
+          group by ([$$21 := %0->$$1]) decor ([]) {
+                    aggregate [$$20] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$5]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- EXTERNAL_GROUP_BY[$$1]  |LOCAL|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              data-scan [$$1, $$5]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+              -- DATASOURCE_SCAN  |PARTITIONED|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  empty-tuple-source
+                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$36, %0->$$37, %0->$$38, %0->$$39, %0->$$40, %0->$$41]
+-- SINK_WRITE  |UNPARTITIONED|
+  project ([$$36, $$37, $$38, $$39, $$40, $$41])
+  -- STREAM_PROJECT  |PARTITIONED|
+    limit 100
+    -- STREAM_LIMIT  |UNPARTITIONED|
+      limit 100
+      -- STREAM_LIMIT  |UNPARTITIONED|
+        exchange 
+        -- SORT_MERGE_EXCHANGE [$$40(DESC), $$39(ASC) ]  |PARTITIONED|
+          limit 100
+          -- STREAM_LIMIT  |LOCAL|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+              order (DESC, %0->$$40) (ASC, %0->$$39) 
+              -- STABLE_SORT [$$40(DESC), $$39(ASC)]  |LOCAL|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  group by ([$$36 := %0->$$44; $$37 := %0->$$45; $$38 := %0->$$46; $$39 := %0->$$47; $$40 := %0->$$48]) decor ([]) {
+                            aggregate [$$41] <- [function-call: hive:sum(FINAL), Args:[%0->$$43]]
+                            -- AGGREGATE  |LOCAL|
+                              nested tuple source
+                              -- NESTED_TUPLE_SOURCE  |LOCAL|
+                         }
+                  -- EXTERNAL_GROUP_BY[$$44, $$45, $$46, $$47, $$48]  |PARTITIONED|
+                    exchange 
+                    -- HASH_PARTITION_EXCHANGE [$$44, $$45, $$46, $$47, $$48]  |PARTITIONED|
+                      group by ([$$44 := %0->$$20; $$45 := %0->$$19; $$46 := %0->$$27; $$47 := %0->$$31; $$48 := %0->$$30]) decor ([]) {
+                                aggregate [$$43] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$7]]
+                                -- AGGREGATE  |LOCAL|
+                                  nested tuple source
+                                  -- NESTED_TUPLE_SOURCE  |LOCAL|
+                             }
+                      -- EXTERNAL_GROUP_BY[$$20, $$19, $$27, $$31, $$30]  |LOCAL|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          project ([$$19, $$20, $$27, $$30, $$31, $$7])
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              join (function-call: algebricks:eq, Args:[%0->$$27, %0->$$1])
+                              -- HYBRID_HASH_JOIN [$$1][$$27]  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                                  project ([$$1, $$7])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      join (function-call: algebricks:eq, Args:[%0->$$3, %0->$$1])
+                                      -- HYBRID_HASH_JOIN [$$3][$$1]  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$3]  |PARTITIONED|
+                                          data-scan [$$3, $$7]<-[$$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16, $$17, $$18] <- default.lineitem
+                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              empty-tuple-source
+                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                                          project ([$$1])
+                                          -- STREAM_PROJECT  |PARTITIONED|
+                                            select (function-call: algebricks:gt, Args:[%0->$$2, 300])
+                                            -- STREAM_SELECT  |PARTITIONED|
+                                              exchange 
+                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                data-scan []<-[$$1, $$2] <- default.q18_tmp
+                                                -- DATASOURCE_SCAN  |PARTITIONED|
+                                                  exchange 
+                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                    empty-tuple-source
+                                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$27]  |PARTITIONED|
+                                  project ([$$27, $$19, $$20, $$30, $$31])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      join (function-call: algebricks:eq, Args:[%0->$$28, %0->$$19])
+                                      -- HYBRID_HASH_JOIN [$$28][$$19]  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$28]  |PARTITIONED|
+                                          data-scan [$$28, $$27, $$30, $$31]<-[$$27, $$28, $$29, $$30, $$31, $$32, $$33, $$34, $$35] <- default.orders
+                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              empty-tuple-source
+                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$19]  |PARTITIONED|
+                                          data-scan [$$19, $$20]<-[$$19, $$20, $$21, $$22, $$23, $$24, $$25, $$26] <- default.customer
+                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              empty-tuple-source
+                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q19_discounted_revenue.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q19_discounted_revenue.plan
new file mode 100644
index 0000000..1827729
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q19_discounted_revenue.plan
@@ -0,0 +1,46 @@
+write [%0->$$26]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    group by ([]) decor ([]) {
+              aggregate [$$26] <- [function-call: hive:sum(FINAL), Args:[%0->$$28]]
+              -- AGGREGATE  |LOCAL|
+                nested tuple source
+                -- NESTED_TUPLE_SOURCE  |LOCAL|
+           }
+    -- EXTERNAL_GROUP_BY[]  |PARTITIONED|
+      exchange 
+      -- HASH_PARTITION_EXCHANGE []  |PARTITIONED|
+        group by ([]) decor ([]) {
+                  aggregate [$$28] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$15, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$16]]]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            project ([$$15, $$16])
+            -- STREAM_PROJECT  |PARTITIONED|
+              select (function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$4, Brand#12], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFRegExp, Args:[%0->$$7, SM CASE||SM BOX||SM PACK||SM PKG]], function-call: algebricks:ge, Args:[%0->$$14, 1]], function-call: algebricks:le, Args:[%0->$$14, 11]], function-call: algebricks:ge, Args:[%0->$$6, 1]], function-call: algebricks:le, Args:[%0->$$6, 5]], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFRegExp, Args:[%0->$$24, AIR||AIR REG]], function-call: algebricks:eq, Args:[%0->$$23, DELIVER IN PERSON]], function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$4, Brand#23], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFRegExp, Args:[%0->$$7, MED BAG||MED BOX||MED PKG||MED PACK]], function-call: algebricks:ge, Args:[%0->$$14, 10]], function-call: algebricks:le, Args:[%0->$$14, 20]], function-call: algebricks:ge, Args:[%0->$$6, 1]], function-call: algebricks:le, Args:[%0->$$6, 10]], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFRegExp, Args:[%0->$$24, AIR||AIR REG]], function-call: algebricks:eq, Args:[%0->$$23, DELIVER IN PERSON]]], function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$4, Brand#34], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFRegExp, Args:[%0->$$7, LG CASE||LG BOX||LG PACK||LG PKG]], function-call: algebricks:ge, Args:[%0->$$14, 20]], function-call: algebricks:le, Args:[%0->$$14, 30]], function-call: algebricks:ge, Args:[%0->$$6, 1]], function-call: algebricks:le, Args:[%0->$$6, 15]], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFRegExp, Args:[%0->$$24, AIR||AIR REG]], function-call: algebricks:eq, Args:[%0->$$23, DELIVER IN PERSON]]])
+              -- STREAM_SELECT  |UNPARTITIONED|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  join (function-call: algebricks:eq, Args:[%0->$$11, %0->$$1])
+                  -- HYBRID_HASH_JOIN [$$11][$$1]  |PARTITIONED|
+                    exchange 
+                    -- HASH_PARTITION_EXCHANGE [$$11]  |PARTITIONED|
+                      data-scan [$$11, $$14, $$15, $$16, $$23, $$24]<-[$$10, $$11, $$12, $$13, $$14, $$15, $$16, $$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24, $$25] <- default.lineitem
+                      -- DATASOURCE_SCAN  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          empty-tuple-source
+                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                    exchange 
+                    -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                      data-scan [$$1, $$4, $$6, $$7]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.part
+                      -- DATASOURCE_SCAN  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          empty-tuple-source
+                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q1_pricing_summary_report.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q1_pricing_summary_report.plan
new file mode 100644
index 0000000..0e9c90f
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q1_pricing_summary_report.plan
@@ -0,0 +1,42 @@
+write [%0->$$17, %0->$$18, %0->$$19, %0->$$20, %0->$$21, %0->$$22, %0->$$23, %0->$$24, %0->$$25, %0->$$27]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24, $$25, $$27])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$27] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$26]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$17(ASC), $$18(ASC) ]  |PARTITIONED|
+        order (ASC, %0->$$17) (ASC, %0->$$18) 
+        -- STABLE_SORT [$$17(ASC), $$18(ASC)]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            group by ([$$17 := %0->$$37; $$18 := %0->$$38]) decor ([]) {
+                      aggregate [$$19, $$20, $$21, $$22, $$23, $$24, $$25, $$26] <- [function-call: hive:sum(FINAL), Args:[%0->$$29], function-call: hive:sum(FINAL), Args:[%0->$$30], function-call: hive:sum(FINAL), Args:[%0->$$31], function-call: hive:sum(FINAL), Args:[%0->$$32], function-call: hive:avg(FINAL), Args:[%0->$$33], function-call: hive:avg(FINAL), Args:[%0->$$34], function-call: hive:avg(FINAL), Args:[%0->$$35], function-call: hive:count(FINAL), Args:[%0->$$36]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$37, $$38]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$37, $$38]  |PARTITIONED|
+                group by ([$$37 := %0->$$9; $$38 := %0->$$10]) decor ([]) {
+                          aggregate [$$29, $$30, $$31, $$32, $$33, $$34, $$35, $$36] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$5], function-call: hive:sum(PARTIAL1), Args:[%0->$$6], function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$6, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$7]]], function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$6, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$7]], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPPlus, Args:[1, %0->$$8]]], function-call: hive:avg(PARTIAL1), Args:[%0->$$5], function-call: hive:avg(PARTIAL1), Args:[%0->$$6], function-call: hive:avg(PARTIAL1), Args:[%0->$$7], function-call: hive:count(PARTIAL1), Args:[1]]
+                          -- AGGREGATE  |LOCAL|
+                            nested tuple source
+                            -- NESTED_TUPLE_SOURCE  |LOCAL|
+                       }
+                -- EXTERNAL_GROUP_BY[$$9, $$10]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    project ([$$9, $$10, $$5, $$6, $$7, $$8])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      select (function-call: algebricks:le, Args:[%0->$$11, 1998-09-02])
+                      -- STREAM_SELECT  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          data-scan [$$5, $$6, $$7, $$8, $$9, $$10, $$11]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                          -- DATASOURCE_SCAN  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              empty-tuple-source
+                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q20_potential_part_promotion.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q20_potential_part_promotion.plan
new file mode 100644
index 0000000..eddfca5
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q20_potential_part_promotion.plan
@@ -0,0 +1,178 @@
+write [%0->$$1]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    distinct ([%0->$$1])
+    -- PRE_SORTED_DISTINCT_BY  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+        order (ASC, %0->$$1) 
+        -- STABLE_SORT [$$1(ASC)]  |LOCAL|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+            project ([$$1])
+            -- STREAM_PROJECT  |PARTITIONED|
+              select (function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$2, forest%])
+              -- STREAM_SELECT  |PARTITIONED|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  data-scan [$$1, $$2]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.part
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$17, %0->$$18, %0->$$20]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$17, $$18, $$20])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$20] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[0.5, %0->$$19]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        group by ([$$17 := %0->$$23; $$18 := %0->$$24]) decor ([]) {
+                  aggregate [$$19] <- [function-call: hive:sum(FINAL), Args:[%0->$$22]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[$$23, $$24]  |PARTITIONED|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE [$$23, $$24]  |PARTITIONED|
+            group by ([$$23 := %0->$$2; $$24 := %0->$$3]) decor ([]) {
+                      aggregate [$$22] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$5]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$2, $$3]  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                project ([$$2, $$3, $$5])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  select (function-call: algebricks:and, Args:[function-call: algebricks:ge, Args:[%0->$$11, 1994-01-01], function-call: algebricks:lt, Args:[%0->$$11, 1995-01-01], function-call: algebricks:ge, Args:[%0->$$11, 1994-01-01], function-call: algebricks:lt, Args:[%0->$$11, 1995-01-01]])
+                  -- STREAM_SELECT  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      data-scan [$$2, $$3, $$5, $$11]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                      -- DATASOURCE_SCAN  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          empty-tuple-source
+                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$6, %0->$$7, %0->$$3]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$6, $$7, $$3])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      join (function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$1, %0->$$5], function-call: algebricks:eq, Args:[%0->$$2, %0->$$6]])
+      -- HYBRID_HASH_JOIN [$$1, $$2][$$5, $$6]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$1, $$2]  |PARTITIONED|
+          data-scan []<-[$$1, $$2, $$3] <- default.q20_tmp2
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$5, $$6]  |PARTITIONED|
+          project ([$$5, $$6, $$7])
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              join (function-call: algebricks:eq, Args:[%0->$$4, %0->$$5])
+              -- HYBRID_HASH_JOIN [$$4][$$5]  |PARTITIONED|
+                exchange 
+                -- HASH_PARTITION_EXCHANGE [$$4]  |PARTITIONED|
+                  data-scan []<-[$$4] <- default.q20_tmp1
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                exchange 
+                -- HASH_PARTITION_EXCHANGE [$$5]  |PARTITIONED|
+                  data-scan [$$5, $$6, $$7]<-[$$5, $$6, $$7, $$8, $$9] <- default.partsupp
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$1]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    distinct ([%0->$$1])
+    -- PRE_SORTED_DISTINCT_BY  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+        order (ASC, %0->$$1) 
+        -- STABLE_SORT [$$1(ASC)]  |LOCAL|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+            project ([$$1])
+            -- STREAM_PROJECT  |PARTITIONED|
+              select (function-call: algebricks:gt, Args:[%0->$$2, %0->$$3])
+              -- STREAM_SELECT  |PARTITIONED|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  data-scan []<-[$$1, $$2, $$3] <- default.q20_tmp3
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$3, %0->$$4]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- SORT_MERGE_EXCHANGE [$$3(ASC) ]  |PARTITIONED|
+    order (ASC, %0->$$3) 
+    -- STABLE_SORT [$$3(ASC)]  |LOCAL|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        project ([$$3, $$4])
+        -- STREAM_PROJECT  |PARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$2])
+            -- HYBRID_HASH_JOIN [$$1][$$2]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                data-scan []<-[$$1] <- default.q20_tmp4
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+                project ([$$2, $$3, $$4])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    join (function-call: algebricks:eq, Args:[%0->$$9, %0->$$5])
+                    -- HYBRID_HASH_JOIN [$$9][$$5]  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$9]  |PARTITIONED|
+                        project ([$$9])
+                        -- STREAM_PROJECT  |PARTITIONED|
+                          select (function-call: algebricks:eq, Args:[%0->$$10, CANADA])
+                          -- STREAM_SELECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              data-scan [$$9, $$10]<-[$$9, $$10, $$11, $$12] <- default.nation
+                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  empty-tuple-source
+                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$5]  |PARTITIONED|
+                        data-scan [$$5, $$2, $$3, $$4]<-[$$2, $$3, $$4, $$5, $$6, $$7, $$8] <- default.supplier
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
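Each operator in these .plan fixtures occupies a pair of lines: the logical operator, then a "-- PHYSICAL_OPERATOR  |PARTITIONING|" annotation, with two-space indentation encoding parent/child nesting and group-by bodies printed inline between "{" and "}". A minimal sketch of a reader for that layout, assuming exactly this convention; the class name and output format are hypothetical and not part of this patch:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;

// Walks a .plan fixture and pairs each logical operator line with the
// "-- PHYSICAL_OPERATOR  |PROPERTY|" annotation printed directly beneath it.
// Depth is derived from the two-spaces-per-level indentation; lines that open
// or close a nested group-by body ("{" / "}") simply print without annotation.
public class PlanFixtureReader {
    public static void main(String[] args) throws IOException {
        List<String> lines = Files.readAllLines(Paths.get(args[0]));
        for (int i = 0; i < lines.size(); i++) {
            String line = lines.get(i);
            String trimmed = line.trim();
            if (trimmed.isEmpty() || trimmed.startsWith("--")) {
                continue; // annotations are consumed with their logical line
            }
            int depth = (line.length() - line.replaceFirst("^ +", "").length()) / 2;
            String physical = (i + 1 < lines.size() && lines.get(i + 1).trim().startsWith("--"))
                    ? lines.get(i + 1).trim() : "";
            System.out.printf("depth=%d %s %s%n", depth, trimmed, physical);
        }
    }
}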
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q21_suppliers_who_kept_orders_waiting.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q21_suppliers_who_kept_orders_waiting.plan
new file mode 100644
index 0000000..cc47cf3
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q21_suppliers_who_kept_orders_waiting.plan
@@ -0,0 +1,224 @@
+write [%0->$$21, %0->$$24, %0->$$23]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$21, $$24, $$23])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$24] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$22]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        group by ([$$21 := %0->$$17]) decor ([]) {
+                  aggregate [$$22, $$23] <- [function-call: hive:count(COMPLETE), Args:[%0->$$18], function-call: hive:max(FINAL), Args:[%0->$$20]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- PRE_CLUSTERED_GROUP_BY[$$17]  |PARTITIONED|
+          exchange 
+          -- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$17(ASC)] HASH:[$$17]  |PARTITIONED|
+            group by ([$$17 := %0->$$1; $$18 := %0->$$3]) decor ([]) {
+                      aggregate [$$20] <- [function-call: hive:max(PARTIAL1), Args:[%0->$$3]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- PRE_CLUSTERED_GROUP_BY[$$1, $$3]  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+                order (ASC, %0->$$1) (ASC, %0->$$3) 
+                -- STABLE_SORT [$$1(ASC), $$3(ASC)]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    data-scan [$$1, $$3]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                    -- DATASOURCE_SCAN  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        empty-tuple-source
+                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$21, %0->$$24, %0->$$23]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$21, $$24, $$23])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$24] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$22]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        group by ([$$21 := %0->$$17]) decor ([]) {
+                  aggregate [$$22, $$23] <- [function-call: hive:count(COMPLETE), Args:[%0->$$18], function-call: hive:max(FINAL), Args:[%0->$$20]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- PRE_CLUSTERED_GROUP_BY[$$17]  |PARTITIONED|
+          exchange 
+          -- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$17(ASC)] HASH:[$$17]  |PARTITIONED|
+            group by ([$$17 := %0->$$1; $$18 := %0->$$3]) decor ([]) {
+                      aggregate [$$20] <- [function-call: hive:max(PARTIAL1), Args:[%0->$$3]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- PRE_CLUSTERED_GROUP_BY[$$1, $$3]  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+                order (ASC, %0->$$1) (ASC, %0->$$3) 
+                -- STABLE_SORT [$$1(ASC), $$3(ASC)]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    project ([$$1, $$3])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      select (function-call: algebricks:gt, Args:[%0->$$13, %0->$$12])
+                      -- STREAM_SELECT  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          data-scan [$$1, $$3, $$12, $$13]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                          -- DATASOURCE_SCAN  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              empty-tuple-source
+                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$43, %0->$$45]
+-- SINK_WRITE  |UNPARTITIONED|
+  project ([$$43, $$45])
+  -- STREAM_PROJECT  |UNPARTITIONED|
+    assign [$$45] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$44]]
+    -- ASSIGN  |UNPARTITIONED|
+      limit 100
+      -- STREAM_LIMIT  |UNPARTITIONED|
+        limit 100
+        -- STREAM_LIMIT  |UNPARTITIONED|
+          exchange 
+          -- SORT_MERGE_EXCHANGE [$$44(DESC), $$43(ASC) ]  |PARTITIONED|
+            limit 100
+            -- STREAM_LIMIT  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+                order (DESC, %0->$$44) (ASC, %0->$$43) 
+                -- STABLE_SORT [$$44(DESC), $$43(ASC)]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    group by ([$$43 := %0->$$48]) decor ([]) {
+                              aggregate [$$44] <- [function-call: hive:count(FINAL), Args:[%0->$$47]]
+                              -- AGGREGATE  |LOCAL|
+                                nested tuple source
+                                -- NESTED_TUPLE_SOURCE  |LOCAL|
+                           }
+                    -- EXTERNAL_GROUP_BY[$$48]  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$48]  |PARTITIONED|
+                        group by ([$$48 := %0->$$37]) decor ([]) {
+                                  aggregate [$$47] <- [function-call: hive:count(PARTIAL1), Args:[1]]
+                                  -- AGGREGATE  |LOCAL|
+                                    nested tuple source
+                                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+                               }
+                        -- EXTERNAL_GROUP_BY[$$37]  |LOCAL|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            project ([$$37])
+                            -- STREAM_PROJECT  |PARTITIONED|
+                              select (function-call: algebricks:or, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull, Args:[%0->$$2], function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$2, 1], function-call: algebricks:eq, Args:[%0->$$18, %0->$$3]]])
+                              -- STREAM_SELECT  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  left outer join (function-call: algebricks:eq, Args:[%0->$$4, %0->$$1])
+                                  -- HYBRID_HASH_JOIN [$$4][$$1]  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$4]  |PARTITIONED|
+                                      project ([$$37, $$4, $$18])
+                                      -- STREAM_PROJECT  |PARTITIONED|
+                                        select (function-call: algebricks:or, Args:[function-call: algebricks:gt, Args:[%0->$$5, 1], function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$5, 1], function-call: algebricks:neq, Args:[%0->$$18, %0->$$6]]])
+                                        -- STREAM_SELECT  |UNPARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            join (function-call: algebricks:eq, Args:[%0->$$16, %0->$$4])
+                                            -- HYBRID_HASH_JOIN [$$16][$$4]  |PARTITIONED|
+                                              exchange 
+                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                project ([$$37, $$16, $$18])
+                                                -- STREAM_PROJECT  |PARTITIONED|
+                                                  exchange 
+                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                    join (function-call: algebricks:eq, Args:[%0->$$16, %0->$$7])
+                                                    -- HYBRID_HASH_JOIN [$$16][$$7]  |PARTITIONED|
+                                                      exchange 
+                                                      -- HASH_PARTITION_EXCHANGE [$$16]  |PARTITIONED|
+                                                        project ([$$37, $$16, $$18])
+                                                        -- STREAM_PROJECT  |PARTITIONED|
+                                                          exchange 
+                                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                            join (function-call: algebricks:eq, Args:[%0->$$18, %0->$$36])
+                                                            -- HYBRID_HASH_JOIN [$$18][$$36]  |PARTITIONED|
+                                                              exchange 
+                                                              -- HASH_PARTITION_EXCHANGE [$$18]  |PARTITIONED|
+                                                                project ([$$16, $$18])
+                                                                -- STREAM_PROJECT  |PARTITIONED|
+                                                                  select (function-call: algebricks:and, Args:[function-call: algebricks:gt, Args:[%0->$$28, %0->$$27], function-call: algebricks:gt, Args:[%0->$$28, %0->$$27]])
+                                                                  -- STREAM_SELECT  |PARTITIONED|
+                                                                    exchange 
+                                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                      data-scan [$$16, $$18, $$27, $$28]<-[$$16, $$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24, $$25, $$26, $$27, $$28, $$29, $$30, $$31] <- default.lineitem
+                                                                      -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                        exchange 
+                                                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                          empty-tuple-source
+                                                                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                              exchange 
+                                                              -- HASH_PARTITION_EXCHANGE [$$36]  |PARTITIONED|
+                                                                project ([$$36, $$37])
+                                                                -- STREAM_PROJECT  |PARTITIONED|
+                                                                  exchange 
+                                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                    join (function-call: algebricks:eq, Args:[%0->$$39, %0->$$32])
+                                                                    -- HYBRID_HASH_JOIN [$$39][$$32]  |PARTITIONED|
+                                                                      exchange 
+                                                                      -- HASH_PARTITION_EXCHANGE [$$39]  |PARTITIONED|
+                                                                        data-scan [$$39, $$36, $$37]<-[$$36, $$37, $$38, $$39, $$40, $$41, $$42] <- default.supplier
+                                                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                          exchange 
+                                                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                            empty-tuple-source
+                                                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                                      exchange 
+                                                                      -- HASH_PARTITION_EXCHANGE [$$32]  |PARTITIONED|
+                                                                        project ([$$32])
+                                                                        -- STREAM_PROJECT  |PARTITIONED|
+                                                                          select (function-call: algebricks:eq, Args:[%0->$$33, SAUDI ARABIA])
+                                                                          -- STREAM_SELECT  |PARTITIONED|
+                                                                            exchange 
+                                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                              data-scan [$$32, $$33]<-[$$32, $$33, $$34, $$35] <- default.nation
+                                                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                                exchange 
+                                                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                  empty-tuple-source
+                                                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                      exchange 
+                                                      -- HASH_PARTITION_EXCHANGE [$$7]  |PARTITIONED|
+                                                        project ([$$7])
+                                                        -- STREAM_PROJECT  |PARTITIONED|
+                                                          select (function-call: algebricks:eq, Args:[%0->$$9, F])
+                                                          -- STREAM_SELECT  |PARTITIONED|
+                                                            exchange 
+                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                              data-scan [$$7, $$9]<-[$$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15] <- default.orders
+                                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                exchange 
+                                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                  empty-tuple-source
+                                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                              exchange 
+                                              -- HASH_PARTITION_EXCHANGE [$$4]  |PARTITIONED|
+                                                data-scan [$$4, $$5, $$6]<-[$$4, $$5, $$6] <- default.q21_tmp1
+                                                -- DATASOURCE_SCAN  |PARTITIONED|
+                                                  exchange 
+                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                    empty-tuple-source
+                                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                                      data-scan [$$1, $$2, $$3]<-[$$1, $$2, $$3] <- default.q21_tmp2
+                                      -- DATASOURCE_SCAN  |PARTITIONED|
+                                        exchange 
+                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                          empty-tuple-source
+                                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
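The aggregations in these plans run in two phases: a LOCAL group-by evaluates the Hive UDAF in PARTIAL1 mode, a HASH_PARTITION_EXCHANGE (or HASH_PARTITION_MERGE_EXCHANGE) routes equal keys to the same partition, and a second, PARTITIONED group-by merges the partials in FINAL mode. A sketch of that contract for count, under the simplifying assumption that partials are plain per-key tallies; class and method names are hypothetical:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Two-phase counting: partial() stands in for count(PARTIAL1) on one
// partition's input, merge() for count(FINAL) after the hash exchange.
public class TwoPhaseCount {
    static Map<String, Long> partial(List<String> keys) {
        Map<String, Long> m = new HashMap<String, Long>();
        for (String k : keys) {
            Long c = m.get(k);
            m.put(k, c == null ? 1L : c + 1L); // count(PARTIAL1)
        }
        return m;
    }
    static Map<String, Long> merge(List<Map<String, Long>> partials) {
        Map<String, Long> m = new HashMap<String, Long>();
        for (Map<String, Long> p : partials) {
            for (Map.Entry<String, Long> e : p.entrySet()) {
                Long c = m.get(e.getKey());
                m.put(e.getKey(), c == null ? e.getValue() : c + e.getValue()); // count(FINAL)
            }
        }
        return m;
    }
    public static void main(String[] args) {
        Map<String, Long> p1 = partial(Arrays.asList("a", "b", "a"));
        Map<String, Long> p2 = partial(Arrays.asList("b", "b"));
        System.out.println(merge(Arrays.asList(p1, p2))); // {a=2, b=3}
    }
}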
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q22_global_sales_opportunity.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q22_global_sales_opportunity.plan
new file mode 100644
index 0000000..591576b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q22_global_sales_opportunity.plan
@@ -0,0 +1,136 @@
+write [%0->$$6, %0->$$1, %0->$$9]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$6, $$1, $$9])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$9] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2]]
+    -- ASSIGN  |PARTITIONED|
+      select (function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:or, Args:[function-call: algebricks:eq, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2], 13], function-call: algebricks:eq, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2], 31]], function-call: algebricks:eq, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2], 23]], function-call: algebricks:eq, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2], 29]], function-call: algebricks:eq, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2], 30]], function-call: algebricks:eq, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2], 18]], function-call: algebricks:eq, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFSubstr, Args:[%0->$$5, 1, 2], 17]])
+      -- STREAM_SELECT  |PARTITIONED|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          data-scan []<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8] <- default.customer
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$4]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    group by ([]) decor ([]) {
+              aggregate [$$4] <- [function-call: hive:avg(FINAL), Args:[%0->$$6]]
+              -- AGGREGATE  |LOCAL|
+                nested tuple source
+                -- NESTED_TUPLE_SOURCE  |LOCAL|
+           }
+    -- EXTERNAL_GROUP_BY[]  |PARTITIONED|
+      exchange 
+      -- HASH_PARTITION_EXCHANGE []  |PARTITIONED|
+        group by ([]) decor ([]) {
+                  aggregate [$$6] <- [function-call: hive:avg(PARTIAL1), Args:[%0->$$1]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            select (function-call: algebricks:gt, Args:[%0->$$1, 0.0])
+            -- STREAM_SELECT  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                data-scan [$$1]<-[$$1, $$2, $$3] <- default.q22_customer_tmp
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$2]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    distinct ([%0->$$2])
+    -- PRE_SORTED_DISTINCT_BY  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+        order (ASC, %0->$$2) 
+        -- STABLE_SORT [$$2(ASC)]  |LOCAL|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+            data-scan [$$2]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.orders
+            -- DATASOURCE_SCAN  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                empty-tuple-source
+                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$6, %0->$$9, %0->$$8]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$6, $$9, $$8])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$9] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$7]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$6(ASC) ]  |PARTITIONED|
+        order (ASC, %0->$$6) 
+        -- STABLE_SORT [$$6(ASC)]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            group by ([$$6 := %0->$$13]) decor ([]) {
+                      aggregate [$$7, $$8] <- [function-call: hive:count(FINAL), Args:[%0->$$11], function-call: hive:sum(FINAL), Args:[%0->$$12]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$13]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$13]  |PARTITIONED|
+                group by ([$$13 := %0->$$5]) decor ([]) {
+                          aggregate [$$11, $$12] <- [function-call: hive:count(PARTIAL1), Args:[1], function-call: hive:sum(PARTIAL1), Args:[%0->$$3]]
+                          -- AGGREGATE  |LOCAL|
+                            nested tuple source
+                            -- NESTED_TUPLE_SOURCE  |LOCAL|
+                       }
+                -- EXTERNAL_GROUP_BY[$$5]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    project ([$$5, $$3])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        join (function-call: algebricks:and, Args:[function-call: algebricks:gt, Args:[%0->$$3, %0->$$1], true])
+                        -- NESTED_LOOP  |PARTITIONED|
+                          exchange 
+                          -- BROADCAST_EXCHANGE  |PARTITIONED|
+                            project ([$$5, $$3])
+                            -- STREAM_PROJECT  |PARTITIONED|
+                              select (function-call: hive:org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull, Args:[%0->$$2])
+                              -- STREAM_SELECT  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  left outer join (function-call: algebricks:eq, Args:[%0->$$4, %0->$$2])
+                                  -- HYBRID_HASH_JOIN [$$4][$$2]  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$4]  |PARTITIONED|
+                                      data-scan []<-[$$3, $$4, $$5] <- default.q22_customer_tmp
+                                      -- DATASOURCE_SCAN  |PARTITIONED|
+                                        exchange 
+                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                          empty-tuple-source
+                                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+                                      data-scan [$$2]<-[$$2] <- default.q22_orders_tmp
+                                      -- DATASOURCE_SCAN  |PARTITIONED|
+                                        exchange 
+                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                          empty-tuple-source
+                                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            data-scan [$$1]<-[$$1] <- default.q22_customer_tmp1
+                            -- DATASOURCE_SCAN  |PARTITIONED|
+                              exchange 
+                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                empty-tuple-source
+                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
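The q22 plan above pairs NESTED_LOOP with a BROADCAST_EXCHANGE because its predicate, gt($$3, $$1), is not an equijoin and cannot be hash-partitioned on a key. A sketch of that shape, assuming the broadcast side is the single global average computed earlier in the plan; values and names are made up for illustration:

import java.util.ArrayList;
import java.util.List;

// One partition's nested-loop pass: the tiny broadcast side is replicated to
// every partition, and each local tuple is tested against it with gt.
public class BroadcastNestedLoop {
    public static void main(String[] args) {
        double[][] probe = { { 101, 5.0 }, { 102, 9.5 } }; // (custkey, acctbal)
        double[] broadcast = { 7.2 };                      // replicated average
        List<double[]> out = new ArrayList<double[]>();
        for (double[] row : probe) {          // partition-local side
            for (double avg : broadcast) {    // BROADCAST_EXCHANGE side
                if (row[1] > avg) {           // non-equijoin predicate: gt
                    out.add(row);
                }
            }
        }
        for (double[] r : out) {
            System.out.println((long) r[0] + " " + r[1]);
        }
    }
}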
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q2_minimum_cost_supplier.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q2_minimum_cost_supplier.plan
new file mode 100644
index 0000000..151f34d
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q2_minimum_cost_supplier.plan
@@ -0,0 +1,156 @@
+write [%0->$$20, %0->$$16, %0->$$26, %0->$$1, %0->$$13, %0->$$3, %0->$$17, %0->$$19, %0->$$21]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$20, $$16, $$26, $$1, $$13, $$3, $$17, $$19, $$21])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$10])
+      -- HYBRID_HASH_JOIN [$$1][$$10]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+          project ([$$1, $$3])
+          -- STREAM_PROJECT  |PARTITIONED|
+            select (function-call: algebricks:and, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$5, %BRASS], function-call: algebricks:eq, Args:[%0->$$6, 15], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$5, %BRASS]])
+            -- STREAM_SELECT  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                data-scan [$$1, $$3, $$5, $$6]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.part
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$10]  |PARTITIONED|
+          project ([$$10, $$16, $$17, $$19, $$20, $$21, $$26, $$13])
+          -- STREAM_PROJECT  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              join (function-call: algebricks:eq, Args:[%0->$$11, %0->$$15])
+              -- HYBRID_HASH_JOIN [$$11][$$15]  |PARTITIONED|
+                exchange 
+                -- HASH_PARTITION_EXCHANGE [$$11]  |PARTITIONED|
+                  data-scan [$$11, $$10, $$13]<-[$$10, $$11, $$12, $$13, $$14] <- default.partsupp
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                exchange 
+                -- HASH_PARTITION_EXCHANGE [$$15]  |PARTITIONED|
+                  project ([$$15, $$16, $$17, $$19, $$20, $$21, $$26])
+                  -- STREAM_PROJECT  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      join (function-call: algebricks:eq, Args:[%0->$$18, %0->$$25])
+                      -- HYBRID_HASH_JOIN [$$18][$$25]  |PARTITIONED|
+                        exchange 
+                        -- HASH_PARTITION_EXCHANGE [$$18]  |PARTITIONED|
+                          data-scan []<-[$$15, $$16, $$17, $$18, $$19, $$20, $$21] <- default.supplier
+                          -- DATASOURCE_SCAN  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              empty-tuple-source
+                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                        exchange 
+                        -- HASH_PARTITION_EXCHANGE [$$25]  |PARTITIONED|
+                          project ([$$25, $$26])
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              join (function-call: algebricks:eq, Args:[%0->$$22, %0->$$27])
+                              -- HYBRID_HASH_JOIN [$$22][$$27]  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$22]  |PARTITIONED|
+                                  project ([$$22])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    select (function-call: algebricks:eq, Args:[%0->$$23, EUROPE])
+                                    -- STREAM_SELECT  |PARTITIONED|
+                                      exchange 
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        data-scan [$$23, $$22]<-[$$22, $$23, $$24] <- default.region
+                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            empty-tuple-source
+                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$27]  |PARTITIONED|
+                                  data-scan [$$27, $$25, $$26]<-[$$25, $$26, $$27, $$28] <- default.nation
+                                  -- DATASOURCE_SCAN  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      empty-tuple-source
+                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$10, %0->$$11]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$10, $$11])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      group by ([$$10 := %0->$$14]) decor ([]) {
+                aggregate [$$11] <- [function-call: hive:min(FINAL), Args:[%0->$$13]]
+                -- AGGREGATE  |LOCAL|
+                  nested tuple source
+                  -- NESTED_TUPLE_SOURCE  |LOCAL|
+             }
+      -- PRE_CLUSTERED_GROUP_BY[$$14]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$14(ASC)] HASH:[$$14]  |PARTITIONED|
+          group by ([$$14 := %0->$$4]) decor ([]) {
+                    aggregate [$$13] <- [function-call: hive:min(PARTIAL1), Args:[%0->$$5]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- PRE_CLUSTERED_GROUP_BY[$$4]  |LOCAL|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+              order (ASC, %0->$$4) 
+              -- STABLE_SORT [$$4(ASC)]  |LOCAL|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  data-scan [$$4, $$5]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.q2_minimum_cost_supplier_tmp1
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$3, %0->$$4, %0->$$5, %0->$$6, %0->$$8, %0->$$9, %0->$$10, %0->$$11]
+-- SINK_WRITE  |UNPARTITIONED|
+  limit 100
+  -- STREAM_LIMIT  |UNPARTITIONED|
+    limit 100
+    -- STREAM_LIMIT  |UNPARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$3(DESC), $$5(ASC), $$4(ASC), $$6(ASC) ]  |PARTITIONED|
+        limit 100
+        -- STREAM_LIMIT  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+            order (DESC, %0->$$3) (ASC, %0->$$5) (ASC, %0->$$4) (ASC, %0->$$6) 
+            -- STABLE_SORT [$$3(DESC), $$5(ASC), $$4(ASC), $$6(ASC)]  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                project ([$$3, $$4, $$5, $$6, $$8, $$9, $$10, $$11])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    join (function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$1, %0->$$6], function-call: algebricks:eq, Args:[%0->$$2, %0->$$7]])
+                    -- HYBRID_HASH_JOIN [$$1, $$2][$$6, $$7]  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$1, $$2]  |PARTITIONED|
+                        data-scan []<-[$$1, $$2] <- default.q2_minimum_cost_supplier_tmp2
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                      exchange 
+                      -- HASH_PARTITION_EXCHANGE [$$6, $$7]  |PARTITIONED|
+                        data-scan [$$6, $$7, $$3, $$4, $$5, $$8, $$9, $$10, $$11]<-[$$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11] <- default.q2_minimum_cost_supplier_tmp1
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
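The "limit 100" appears three times in the q2 plan above because the limit is pushed below the SORT_MERGE_EXCHANGE: each partition sorts locally and keeps 100 rows, the exchange merges the sorted streams, and the limit is applied once more on the merged result. A small sketch of that pipeline, with k=3 and integer keys standing in for the real sort columns:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// localTopK() models STABLE_SORT + STREAM_LIMIT |LOCAL| on one partition;
// main() models the SORT_MERGE_EXCHANGE plus the final STREAM_LIMIT.
public class LimitPushdown {
    static List<Integer> localTopK(List<Integer> partition, int k) {
        List<Integer> sorted = new ArrayList<Integer>(partition);
        Collections.sort(sorted, Collections.reverseOrder()); // STABLE_SORT (DESC)
        return sorted.subList(0, Math.min(k, sorted.size())); // STREAM_LIMIT |LOCAL|
    }
    public static void main(String[] args) {
        int k = 3; // the plan above uses limit 100
        List<Integer> merged = new ArrayList<Integer>();
        merged.addAll(localTopK(Arrays.asList(5, 1, 9, 7), k));
        merged.addAll(localTopK(Arrays.asList(8, 2, 6), k));
        Collections.sort(merged, Collections.reverseOrder()); // merge step
        System.out.println(merged.subList(0, k));             // [9, 8, 7]
    }
}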
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q3_shipping_priority.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q3_shipping_priority.plan
new file mode 100644
index 0000000..a1b8e42
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q3_shipping_priority.plan
@@ -0,0 +1,70 @@
+write [%0->$$1, %0->$$34, %0->$$29, %0->$$32]
+-- SINK_WRITE  |UNPARTITIONED|
+  limit 10
+  -- STREAM_LIMIT  |UNPARTITIONED|
+    limit 10
+    -- STREAM_LIMIT  |UNPARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$34(DESC) ]  |PARTITIONED|
+        limit 10
+        -- STREAM_LIMIT  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+            order (DESC, %0->$$34) 
+            -- STABLE_SORT [$$34(DESC)]  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                project ([$$1, $$34, $$29, $$32])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  assign [$$34] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$6, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$7]]]
+                  -- ASSIGN  |PARTITIONED|
+                    project ([$$29, $$32, $$1, $$6, $$7])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$25])
+                        -- HYBRID_HASH_JOIN [$$1][$$25]  |PARTITIONED|
+                          exchange 
+                          -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                            project ([$$1, $$6, $$7])
+                            -- STREAM_PROJECT  |PARTITIONED|
+                              select (function-call: algebricks:lt, Args:[%0->$$4, 3])
+                              -- STREAM_SELECT  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  data-scan [$$1, $$4, $$6, $$7]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                                  -- DATASOURCE_SCAN  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      empty-tuple-source
+                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                          exchange 
+                          -- HASH_PARTITION_EXCHANGE [$$25]  |PARTITIONED|
+                            project ([$$25, $$29, $$32])
+                            -- STREAM_PROJECT  |PARTITIONED|
+                              exchange 
+                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                join (function-call: algebricks:eq, Args:[%0->$$26, %0->$$17])
+                                -- HYBRID_HASH_JOIN [$$26][$$17]  |PARTITIONED|
+                                  exchange 
+                                  -- HASH_PARTITION_EXCHANGE [$$26]  |PARTITIONED|
+                                    data-scan [$$26, $$25, $$29, $$32]<-[$$25, $$26, $$27, $$28, $$29, $$30, $$31, $$32, $$33] <- default.orders
+                                    -- DATASOURCE_SCAN  |PARTITIONED|
+                                      exchange 
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        empty-tuple-source
+                                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                  exchange 
+                                  -- HASH_PARTITION_EXCHANGE [$$17]  |PARTITIONED|
+                                    project ([$$17])
+                                    -- STREAM_PROJECT  |PARTITIONED|
+                                      select (function-call: algebricks:eq, Args:[%0->$$23, BUILDING])
+                                      -- STREAM_SELECT  |PARTITIONED|
+                                        exchange 
+                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                          data-scan [$$17, $$23]<-[$$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24] <- default.customer
+                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              empty-tuple-source
+                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
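The equijoins above are all HYBRID_HASH_JOINs whose inputs are first re-partitioned by HASH_PARTITION_EXCHANGE on the join keys, so matching keys meet in the same partition and each partition joins independently. A sketch of that partitioned build/probe scheme, ignoring the hybrid operator's spill-to-disk behavior; data and names are illustrative:

import java.util.HashMap;
import java.util.Map;

// Per-partition hash join: rows are routed by key % parts (standing in for
// the hash partitioner), one side builds a table, the other probes it.
public class PartitionedHashJoin {
    public static void main(String[] args) {
        int parts = 2;
        int[][] build = { { 1, 10 }, { 2, 20 }, { 3, 30 } };   // (key, payload)
        int[][] probe = { { 2, 200 }, { 3, 300 }, { 4, 400 } };
        for (int p = 0; p < parts; p++) {
            Map<Integer, Integer> table = new HashMap<Integer, Integer>();
            for (int[] r : build) {
                if (r[0] % parts == p) {
                    table.put(r[0], r[1]);                     // build side
                }
            }
            for (int[] r : probe) {
                if (r[0] % parts != p) {
                    continue;                                  // other partition
                }
                Integer match = table.get(r[0]);
                if (match != null) {
                    System.out.println("key=" + r[0] + " " + match + "," + r[1]);
                }
            }
        }
    }
}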
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q4_order_priority.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q4_order_priority.plan
new file mode 100644
index 0000000..435fd7c
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q4_order_priority.plan
@@ -0,0 +1,82 @@
+write [%0->$$1]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    distinct ([%0->$$1])
+    -- PRE_SORTED_DISTINCT_BY  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+        order (ASC, %0->$$1) 
+        -- STABLE_SORT [$$1(ASC)]  |LOCAL|
+          exchange 
+          -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+            project ([$$1])
+            -- STREAM_PROJECT  |PARTITIONED|
+              select (function-call: algebricks:lt, Args:[%0->$$12, %0->$$13])
+              -- STREAM_SELECT  |PARTITIONED|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  data-scan [$$1, $$12, $$13]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$11, %0->$$13]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$11, $$13])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$13] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFToInteger, Args:[%0->$$12]]
+    -- ASSIGN  |PARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$11(ASC) ]  |PARTITIONED|
+        order (ASC, %0->$$11) 
+        -- STABLE_SORT [$$11(ASC)]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            group by ([$$11 := %0->$$16]) decor ([]) {
+                      aggregate [$$12] <- [function-call: hive:count(FINAL), Args:[%0->$$15]]
+                      -- AGGREGATE  |LOCAL|
+                        nested tuple source
+                        -- NESTED_TUPLE_SOURCE  |LOCAL|
+                   }
+            -- EXTERNAL_GROUP_BY[$$16]  |PARTITIONED|
+              exchange 
+              -- HASH_PARTITION_EXCHANGE [$$16]  |PARTITIONED|
+                group by ([$$16 := %0->$$7]) decor ([]) {
+                          aggregate [$$15] <- [function-call: hive:count(PARTIAL1), Args:[1]]
+                          -- AGGREGATE  |LOCAL|
+                            nested tuple source
+                            -- NESTED_TUPLE_SOURCE  |LOCAL|
+                       }
+                -- EXTERNAL_GROUP_BY[$$7]  |LOCAL|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    project ([$$7])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$2])
+                        -- HYBRID_HASH_JOIN [$$1][$$2]  |PARTITIONED|
+                          exchange 
+                          -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                            data-scan []<-[$$1] <- default.q4_order_priority_tmp
+                            -- DATASOURCE_SCAN  |PARTITIONED|
+                              exchange 
+                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                empty-tuple-source
+                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                          exchange 
+                          -- HASH_PARTITION_EXCHANGE [$$2]  |PARTITIONED|
+                            project ([$$2, $$7])
+                            -- STREAM_PROJECT  |PARTITIONED|
+                              select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$6, 1993-10-01], function-call: algebricks:ge, Args:[%0->$$6, 1993-07-01], function-call: algebricks:lt, Args:[%0->$$6, 1993-10-01]])
+                              -- STREAM_SELECT  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  data-scan [$$2, $$6, $$7]<-[$$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10] <- default.orders
+                                  -- DATASOURCE_SCAN  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      empty-tuple-source
+                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
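The distinct in the q4 plan above is computed without a hash table: HASH_PARTITION_EXCHANGE on the key sends equal keys to the same partition, STABLE_SORT orders them, and PRE_SORTED_DISTINCT_BY then only has to drop adjacent duplicates. A one-partition sketch of that idea, with illustrative data:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Sort-then-dedup on one partition's keys, mirroring
// STABLE_SORT [$$1(ASC)] followed by PRE_SORTED_DISTINCT_BY.
public class PreSortedDistinct {
    public static void main(String[] args) {
        List<Integer> partition = new ArrayList<Integer>(Arrays.asList(3, 1, 3, 2, 1));
        Collections.sort(partition);              // STABLE_SORT [$$1(ASC)]
        List<Integer> distinct = new ArrayList<Integer>();
        for (Integer v : partition) {
            if (distinct.isEmpty() || !distinct.get(distinct.size() - 1).equals(v)) {
                distinct.add(v);                  // PRE_SORTED_DISTINCT_BY
            }
        }
        System.out.println(distinct);             // [1, 2, 3]
    }
}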
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q5_local_supplier_volume.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q5_local_supplier_volume.plan
new file mode 100644
index 0000000..177d24c
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q5_local_supplier_volume.plan
@@ -0,0 +1,126 @@
+write [%0->$$48, %0->$$49]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$48, $$49])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- SORT_MERGE_EXCHANGE [$$49(DESC) ]  |PARTITIONED|
+      order (DESC, %0->$$49) 
+      -- STABLE_SORT [$$49(DESC)]  |LOCAL|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          group by ([$$48 := %0->$$52]) decor ([]) {
+                    aggregate [$$49] <- [function-call: hive:sum(FINAL), Args:[%0->$$51]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- EXTERNAL_GROUP_BY[$$52]  |PARTITIONED|
+            exchange 
+            -- HASH_PARTITION_EXCHANGE [$$52]  |PARTITIONED|
+              group by ([$$52 := %0->$$42]) decor ([]) {
+                        aggregate [$$51] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$23, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$24]]]]
+                        -- AGGREGATE  |LOCAL|
+                          nested tuple source
+                          -- NESTED_TUPLE_SOURCE  |LOCAL|
+                     }
+              -- EXTERNAL_GROUP_BY[$$42]  |LOCAL|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  project ([$$42, $$23, $$24])
+                  -- STREAM_PROJECT  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      join (function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$37, %0->$$4], function-call: algebricks:eq, Args:[%0->$$10, %0->$$1]])
+                      -- HYBRID_HASH_JOIN [$$37, $$10][$$4, $$1]  |PARTITIONED|
+                        exchange 
+                        -- HASH_PARTITION_EXCHANGE [$$37, $$10]  |PARTITIONED|
+                          project ([$$10, $$42, $$23, $$24, $$37])
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              join (function-call: algebricks:eq, Args:[%0->$$18, %0->$$9])
+                              -- HYBRID_HASH_JOIN [$$18][$$9]  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$18]  |PARTITIONED|
+                                  project ([$$18, $$23, $$24, $$42, $$37])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      join (function-call: algebricks:eq, Args:[%0->$$34, %0->$$20])
+                                      -- HYBRID_HASH_JOIN [$$34][$$20]  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$34]  |PARTITIONED|
+                                          project ([$$34, $$37, $$42])
+                                          -- STREAM_PROJECT  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              join (function-call: algebricks:eq, Args:[%0->$$41, %0->$$37])
+                                              -- HYBRID_HASH_JOIN [$$41][$$37]  |PARTITIONED|
+                                                exchange 
+                                                -- HASH_PARTITION_EXCHANGE [$$41]  |PARTITIONED|
+                                                  project ([$$41, $$42])
+                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                    exchange 
+                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                      join (function-call: algebricks:eq, Args:[%0->$$45, %0->$$43])
+                                                      -- HYBRID_HASH_JOIN [$$45][$$43]  |PARTITIONED|
+                                                        exchange 
+                                                        -- HASH_PARTITION_EXCHANGE [$$45]  |PARTITIONED|
+                                                          project ([$$45])
+                                                          -- STREAM_PROJECT  |PARTITIONED|
+                                                            select (function-call: algebricks:eq, Args:[%0->$$46, ASIA])
+                                                            -- STREAM_SELECT  |PARTITIONED|
+                                                              exchange 
+                                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                data-scan [$$46, $$45]<-[$$45, $$46, $$47] <- default.region
+                                                                -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                  exchange 
+                                                                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                    empty-tuple-source
+                                                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                        exchange 
+                                                        -- HASH_PARTITION_EXCHANGE [$$43]  |PARTITIONED|
+                                                          data-scan [$$43, $$41, $$42]<-[$$41, $$42, $$43, $$44] <- default.nation
+                                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                                            exchange 
+                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                              empty-tuple-source
+                                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                exchange 
+                                                -- HASH_PARTITION_EXCHANGE [$$37]  |PARTITIONED|
+                                                  data-scan [$$37, $$34]<-[$$34, $$35, $$36, $$37, $$38, $$39, $$40] <- default.supplier
+                                                  -- DATASOURCE_SCAN  |PARTITIONED|
+                                                    exchange 
+                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                      empty-tuple-source
+                                                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$20]  |PARTITIONED|
+                                          data-scan [$$20, $$18, $$23, $$24]<-[$$18, $$19, $$20, $$21, $$22, $$23, $$24, $$25, $$26, $$27, $$28, $$29, $$30, $$31, $$32, $$33] <- default.lineitem
+                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              empty-tuple-source
+                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$9]  |PARTITIONED|
+                                  project ([$$9, $$10])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$13, 1995-01-01], function-call: algebricks:ge, Args:[%0->$$13, 1994-01-01], function-call: algebricks:lt, Args:[%0->$$13, 1995-01-01]])
+                                    -- STREAM_SELECT  |PARTITIONED|
+                                      exchange 
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        data-scan [$$9, $$10, $$13]<-[$$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16, $$17] <- default.orders
+                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            empty-tuple-source
+                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                        exchange 
+                        -- HASH_PARTITION_EXCHANGE [$$4, $$1]  |PARTITIONED|
+                          data-scan [$$4, $$1]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8] <- default.customer
+                          -- DATASOURCE_SCAN  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              empty-tuple-source
+                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q6_forecast_revenue_change.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q6_forecast_revenue_change.plan
new file mode 100644
index 0000000..cd9ffcd
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q6_forecast_revenue_change.plan
@@ -0,0 +1,34 @@
+write [%0->$$17]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    group by ([]) decor ([]) {
+              aggregate [$$17] <- [function-call: hive:sum(FINAL), Args:[%0->$$19]]
+              -- AGGREGATE  |LOCAL|
+                nested tuple source
+                -- NESTED_TUPLE_SOURCE  |LOCAL|
+           }
+    -- EXTERNAL_GROUP_BY[]  |PARTITIONED|
+      exchange 
+      -- HASH_PARTITION_EXCHANGE []  |PARTITIONED|
+        group by ([]) decor ([]) {
+                  aggregate [$$19] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$6, %0->$$7]]]
+                  -- AGGREGATE  |LOCAL|
+                    nested tuple source
+                    -- NESTED_TUPLE_SOURCE  |LOCAL|
+               }
+        -- EXTERNAL_GROUP_BY[]  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            project ([$$6, $$7])
+            -- STREAM_PROJECT  |PARTITIONED|
+              select (function-call: algebricks:and, Args:[function-call: algebricks:ge, Args:[%0->$$11, 1994-01-01], function-call: algebricks:lt, Args:[%0->$$11, 1995-01-01], function-call: algebricks:ge, Args:[%0->$$7, 0.05], function-call: algebricks:le, Args:[%0->$$7, 0.07], function-call: algebricks:lt, Args:[%0->$$5, 24], function-call: algebricks:ge, Args:[%0->$$11, 1994-01-01], function-call: algebricks:lt, Args:[%0->$$11, 1995-01-01], function-call: algebricks:ge, Args:[%0->$$7, 0.05], function-call: algebricks:le, Args:[%0->$$7, 0.07], function-call: algebricks:lt, Args:[%0->$$5, 24]])
+              -- STREAM_SELECT  |PARTITIONED|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  data-scan [$$5, $$6, $$7, $$11]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      empty-tuple-source
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q7_volume_shipping.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q7_volume_shipping.plan
new file mode 100644
index 0000000..39f8301
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q7_volume_shipping.plan
@@ -0,0 +1,192 @@
+write [%0->$$17, %0->$$18, %0->$$19, %0->$$20]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    union ($$6, $$10, $$17) ($$2, $$14, $$18) ($$5, $$9, $$19) ($$1, $$13, $$20)
+    -- UNION_ALL  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |UNPARTITIONED|
+        project ([$$6, $$2, $$5, $$1])
+        -- STREAM_PROJECT  |UNPARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            join (true)
+            -- NESTED_LOOP  |PARTITIONED|
+              exchange 
+              -- BROADCAST_EXCHANGE  |PARTITIONED|
+                select (function-call: algebricks:eq, Args:[%0->$$2, GERMANY])
+                -- STREAM_SELECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    data-scan [$$1, $$2]<-[$$1, $$2, $$3, $$4] <- default.nation
+                    -- DATASOURCE_SCAN  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        empty-tuple-source
+                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                project ([$$5, $$6])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  select (function-call: algebricks:eq, Args:[%0->$$6, FRANCE])
+                  -- STREAM_SELECT  |PARTITIONED|
+                    project ([$$5, $$6])
+                    -- STREAM_PROJECT  |UNPARTITIONED|
+                      assign [$$5, $$6] <- [%0->$$9, %0->$$10]
+                      -- ASSIGN  |UNPARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          replicate 
+                          -- SPLIT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              data-scan [$$9, $$10]<-[$$9, $$10, $$11, $$12] <- default.nation
+                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  empty-tuple-source
+                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |UNPARTITIONED|
+        project ([$$10, $$14, $$9, $$13])
+        -- STREAM_PROJECT  |UNPARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            join (true)
+            -- NESTED_LOOP  |PARTITIONED|
+              exchange 
+              -- BROADCAST_EXCHANGE  |PARTITIONED|
+                select (function-call: algebricks:eq, Args:[%0->$$14, FRANCE])
+                -- STREAM_SELECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    data-scan [$$13, $$14]<-[$$13, $$14, $$15, $$16] <- default.nation
+                    -- DATASOURCE_SCAN  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        empty-tuple-source
+                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                project ([$$9, $$10])
+                -- STREAM_PROJECT  |PARTITIONED|
+                  select (function-call: algebricks:eq, Args:[%0->$$10, GERMANY])
+                  -- STREAM_SELECT  |PARTITIONED|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      replicate 
+                      -- SPLIT  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          data-scan [$$9, $$10]<-[$$9, $$10, $$11, $$12] <- default.nation
+                          -- DATASOURCE_SCAN  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              empty-tuple-source
+                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$47, %0->$$48, %0->$$49, %0->$$50]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$47, $$48, $$49, $$50])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- SORT_MERGE_EXCHANGE [$$47(ASC), $$48(ASC), $$49(ASC) ]  |PARTITIONED|
+      order (ASC, %0->$$47) (ASC, %0->$$48) (ASC, %0->$$49) 
+      -- STABLE_SORT [$$47(ASC), $$48(ASC), $$49(ASC)]  |LOCAL|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          group by ([$$47 := %0->$$53; $$48 := %0->$$54; $$49 := %0->$$55]) decor ([]) {
+                    aggregate [$$50] <- [function-call: hive:sum(FINAL), Args:[%0->$$52]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- EXTERNAL_GROUP_BY[$$53, $$54, $$55]  |PARTITIONED|
+            exchange 
+            -- HASH_PARTITION_EXCHANGE [$$53, $$54, $$55]  |PARTITIONED|
+              group by ([$$53 := %0->$$1; $$54 := %0->$$2; $$55 := %0->$$45]) decor ([]) {
+                        aggregate [$$52] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$46]]
+                        -- AGGREGATE  |LOCAL|
+                          nested tuple source
+                          -- NESTED_TUPLE_SOURCE  |LOCAL|
+                     }
+              -- EXTERNAL_GROUP_BY[$$1, $$2, $$45]  |LOCAL|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  project ([$$1, $$2, $$45, $$46])
+                  -- STREAM_PROJECT  |PARTITIONED|
+                    assign [$$45, $$46] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFYear, Args:[%0->$$30], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$25, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$26]]]
+                    -- ASSIGN  |PARTITIONED|
+                      project ([$$1, $$2, $$30, $$25, $$26])
+                      -- STREAM_PROJECT  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          join (function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$15, %0->$$4], function-call: algebricks:eq, Args:[%0->$$8, %0->$$3]])
+                          -- HYBRID_HASH_JOIN [$$15, $$8][$$4, $$3]  |PARTITIONED|
+                            exchange 
+                            -- HASH_PARTITION_EXCHANGE [$$15, $$8]  |PARTITIONED|
+                              project ([$$8, $$30, $$25, $$26, $$15])
+                              -- STREAM_PROJECT  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  join (function-call: algebricks:eq, Args:[%0->$$22, %0->$$5])
+                                  -- HYBRID_HASH_JOIN [$$22][$$5]  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$22]  |PARTITIONED|
+                                      project ([$$15, $$30, $$25, $$26, $$22])
+                                      -- STREAM_PROJECT  |PARTITIONED|
+                                        exchange 
+                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                          join (function-call: algebricks:eq, Args:[%0->$$37, %0->$$12])
+                                          -- HYBRID_HASH_JOIN [$$37][$$12]  |PARTITIONED|
+                                            exchange 
+                                            -- HASH_PARTITION_EXCHANGE [$$37]  |PARTITIONED|
+                                              project ([$$37, $$22, $$25, $$26, $$30])
+                                              -- STREAM_PROJECT  |PARTITIONED|
+                                                exchange 
+                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                  join (function-call: algebricks:eq, Args:[%0->$$20, %0->$$36])
+                                                  -- HYBRID_HASH_JOIN [$$20][$$36]  |PARTITIONED|
+                                                    exchange 
+                                                    -- HASH_PARTITION_EXCHANGE [$$20]  |PARTITIONED|
+                                                      select (function-call: algebricks:and, Args:[function-call: algebricks:le, Args:[%0->$$30, 1996-12-31], function-call: algebricks:ge, Args:[%0->$$30, 1995-01-01], function-call: algebricks:le, Args:[%0->$$30, 1996-12-31]])
+                                                      -- STREAM_SELECT  |PARTITIONED|
+                                                        exchange 
+                                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                          data-scan [$$20, $$22, $$25, $$26, $$30]<-[$$20, $$21, $$22, $$23, $$24, $$25, $$26, $$27, $$28, $$29, $$30, $$31, $$32, $$33, $$34, $$35] <- default.lineitem
+                                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                                            exchange 
+                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                              empty-tuple-source
+                                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                    exchange 
+                                                    -- HASH_PARTITION_EXCHANGE [$$36]  |PARTITIONED|
+                                                      data-scan [$$36, $$37]<-[$$36, $$37, $$38, $$39, $$40, $$41, $$42, $$43, $$44] <- default.orders
+                                                      -- DATASOURCE_SCAN  |PARTITIONED|
+                                                        exchange 
+                                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                          empty-tuple-source
+                                                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                            exchange 
+                                            -- HASH_PARTITION_EXCHANGE [$$12]  |PARTITIONED|
+                                              data-scan [$$12, $$15]<-[$$12, $$13, $$14, $$15, $$16, $$17, $$18, $$19] <- default.customer
+                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                exchange 
+                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                  empty-tuple-source
+                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$5]  |PARTITIONED|
+                                      data-scan [$$5, $$8]<-[$$5, $$6, $$7, $$8, $$9, $$10, $$11] <- default.supplier
+                                      -- DATASOURCE_SCAN  |PARTITIONED|
+                                        exchange 
+                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                          empty-tuple-source
+                                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                            exchange 
+                            -- HASH_PARTITION_EXCHANGE [$$4, $$3]  |PARTITIONED|
+                              data-scan [$$4, $$3, $$1, $$2]<-[$$1, $$2, $$3, $$4] <- default.q7_volume_shipping_tmp
+                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  empty-tuple-source
+                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q8_national_market_share.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q8_national_market_share.plan
new file mode 100644
index 0000000..b807a24
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q8_national_market_share.plan
@@ -0,0 +1,190 @@
+write [%0->$$63, %0->$$66]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- SORT_MERGE_EXCHANGE [$$63(ASC) ]  |PARTITIONED|
+    order (ASC, %0->$$63) 
+    -- STABLE_SORT [$$63(ASC)]  |LOCAL|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        project ([$$63, $$66])
+        -- STREAM_PROJECT  |PARTITIONED|
+          assign [$$66] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPDivide, Args:[%0->$$64, %0->$$65]]
+          -- ASSIGN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              group by ([$$63 := %0->$$70]) decor ([]) {
+                        aggregate [$$64, $$65] <- [function-call: hive:sum(FINAL), Args:[%0->$$68], function-call: hive:sum(FINAL), Args:[%0->$$69]]
+                        -- AGGREGATE  |LOCAL|
+                          nested tuple source
+                          -- NESTED_TUPLE_SOURCE  |LOCAL|
+                     }
+              -- EXTERNAL_GROUP_BY[$$70]  |PARTITIONED|
+                exchange 
+                -- HASH_PARTITION_EXCHANGE [$$70]  |PARTITIONED|
+                  group by ([$$70 := %0->$$61]) decor ([]) {
+                            aggregate [$$68, $$69] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen, Args:[function-call: algebricks:eq, Args:[%0->$$2, BRAZIL], %0->$$62, 0.0]], function-call: hive:sum(PARTIAL1), Args:[%0->$$62]]
+                            -- AGGREGATE  |LOCAL|
+                              nested tuple source
+                              -- NESTED_TUPLE_SOURCE  |LOCAL|
+                         }
+                  -- EXTERNAL_GROUP_BY[$$61]  |LOCAL|
+                    exchange 
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      project ([$$61, $$62, $$2])
+                      -- STREAM_PROJECT  |PARTITIONED|
+                        assign [$$61, $$62] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFYear, Args:[%0->$$41], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$26, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$27]]]
+                        -- ASSIGN  |PARTITIONED|
+                          project ([$$2, $$41, $$27, $$26])
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              join (function-call: algebricks:eq, Args:[%0->$$8, %0->$$1])
+                              -- HYBRID_HASH_JOIN [$$8][$$1]  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$8]  |PARTITIONED|
+                                  project ([$$8, $$41, $$27, $$26])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    exchange 
+                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                      join (function-call: algebricks:eq, Args:[%0->$$23, %0->$$5])
+                                      -- HYBRID_HASH_JOIN [$$23][$$5]  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$23]  |PARTITIONED|
+                                          project ([$$41, $$27, $$26, $$23])
+                                          -- STREAM_PROJECT  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              join (function-call: algebricks:eq, Args:[%0->$$22, %0->$$12])
+                                              -- HYBRID_HASH_JOIN [$$22][$$12]  |PARTITIONED|
+                                                exchange 
+                                                -- HASH_PARTITION_EXCHANGE [$$22]  |PARTITIONED|
+                                                  project ([$$41, $$22, $$23, $$26, $$27])
+                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                    exchange 
+                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                      join (function-call: algebricks:eq, Args:[%0->$$21, %0->$$37])
+                                                      -- HYBRID_HASH_JOIN [$$21][$$37]  |PARTITIONED|
+                                                        exchange 
+                                                        -- HASH_PARTITION_EXCHANGE [$$21]  |PARTITIONED|
+                                                          data-scan [$$21, $$22, $$23, $$26, $$27]<-[$$21, $$22, $$23, $$24, $$25, $$26, $$27, $$28, $$29, $$30, $$31, $$32, $$33, $$34, $$35, $$36] <- default.lineitem
+                                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                                            exchange 
+                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                              empty-tuple-source
+                                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                        exchange 
+                                                        -- HASH_PARTITION_EXCHANGE [$$37]  |PARTITIONED|
+                                                          project ([$$37, $$41])
+                                                          -- STREAM_PROJECT  |PARTITIONED|
+                                                            exchange 
+                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                              join (function-call: algebricks:eq, Args:[%0->$$46, %0->$$38])
+                                                              -- HYBRID_HASH_JOIN [$$46][$$38]  |PARTITIONED|
+                                                                exchange 
+                                                                -- HASH_PARTITION_EXCHANGE [$$46]  |PARTITIONED|
+                                                                  project ([$$46])
+                                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                                    exchange 
+                                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                      join (function-call: algebricks:eq, Args:[%0->$$54, %0->$$49])
+                                                                      -- HYBRID_HASH_JOIN [$$54][$$49]  |PARTITIONED|
+                                                                        exchange 
+                                                                        -- HASH_PARTITION_EXCHANGE [$$54]  |PARTITIONED|
+                                                                          project ([$$54])
+                                                                          -- STREAM_PROJECT  |PARTITIONED|
+                                                                            exchange 
+                                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                              join (function-call: algebricks:eq, Args:[%0->$$58, %0->$$56])
+                                                                              -- HYBRID_HASH_JOIN [$$58][$$56]  |PARTITIONED|
+                                                                                exchange 
+                                                                                -- HASH_PARTITION_EXCHANGE [$$58]  |PARTITIONED|
+                                                                                  project ([$$58])
+                                                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                                                    select (function-call: algebricks:eq, Args:[%0->$$59, AMERICA])
+                                                                                    -- STREAM_SELECT  |PARTITIONED|
+                                                                                      exchange 
+                                                                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                        data-scan [$$59, $$58]<-[$$58, $$59, $$60] <- default.region
+                                                                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                                          exchange 
+                                                                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                            empty-tuple-source
+                                                                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                                                exchange 
+                                                                                -- HASH_PARTITION_EXCHANGE [$$56]  |PARTITIONED|
+                                                                                  project ([$$56, $$54])
+                                                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                                                    exchange 
+                                                                                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                      replicate 
+                                                                                      -- SPLIT  |PARTITIONED|
+                                                                                        exchange 
+                                                                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                          data-scan []<-[$$54, $$55, $$56, $$57] <- default.nation
+                                                                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                                            exchange 
+                                                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                                              empty-tuple-source
+                                                                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                                        exchange 
+                                                                        -- HASH_PARTITION_EXCHANGE [$$49]  |PARTITIONED|
+                                                                          data-scan [$$49, $$46]<-[$$46, $$47, $$48, $$49, $$50, $$51, $$52, $$53] <- default.customer
+                                                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                            exchange 
+                                                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                              empty-tuple-source
+                                                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                                exchange 
+                                                                -- HASH_PARTITION_EXCHANGE [$$38]  |PARTITIONED|
+                                                                  project ([$$38, $$37, $$41])
+                                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                                    select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$41, 1996-12-31], function-call: algebricks:ge, Args:[%0->$$41, 1995-01-01]])
+                                                                    -- STREAM_SELECT  |PARTITIONED|
+                                                                      exchange 
+                                                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                        data-scan [$$38, $$37, $$41]<-[$$37, $$38, $$39, $$40, $$41, $$42, $$43, $$44, $$45] <- default.orders
+                                                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                          exchange 
+                                                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                            empty-tuple-source
+                                                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                exchange 
+                                                -- HASH_PARTITION_EXCHANGE [$$12]  |PARTITIONED|
+                                                  project ([$$12])
+                                                  -- STREAM_PROJECT  |PARTITIONED|
+                                                    select (function-call: algebricks:eq, Args:[%0->$$16, ECONOMY ANODIZED STEEL])
+                                                    -- STREAM_SELECT  |PARTITIONED|
+                                                      exchange 
+                                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                        data-scan [$$16, $$12]<-[$$12, $$13, $$14, $$15, $$16, $$17, $$18, $$19, $$20] <- default.part
+                                                        -- DATASOURCE_SCAN  |PARTITIONED|
+                                                          exchange 
+                                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                            empty-tuple-source
+                                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                        exchange 
+                                        -- HASH_PARTITION_EXCHANGE [$$5]  |PARTITIONED|
+                                          data-scan [$$5, $$8]<-[$$5, $$6, $$7, $$8, $$9, $$10, $$11] <- default.supplier
+                                          -- DATASOURCE_SCAN  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              empty-tuple-source
+                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                exchange 
+                                -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                                  project ([$$1, $$2])
+                                  -- STREAM_PROJECT  |PARTITIONED|
+                                    assign [$$1, $$2, $$3, $$4] <- [%0->$$54, %0->$$55, %0->$$56, %0->$$57]
+                                    -- ASSIGN  |UNPARTITIONED|
+                                      exchange 
+                                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                        replicate 
+                                        -- SPLIT  |PARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            data-scan []<-[$$54, $$55, $$56, $$57] <- default.nation
+                                            -- DATASOURCE_SCAN  |PARTITIONED|
+                                              exchange 
+                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                empty-tuple-source
+                                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q9_product_type_profit.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q9_product_type_profit.plan
new file mode 100644
index 0000000..f57f4a3
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/q9_product_type_profit.plan
@@ -0,0 +1,124 @@
+write [%0->$$53, %0->$$54, %0->$$55]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$53, $$54, $$55])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- SORT_MERGE_EXCHANGE [$$53(ASC), $$54(DESC) ]  |PARTITIONED|
+      order (ASC, %0->$$53) (DESC, %0->$$54) 
+      -- STABLE_SORT [$$53(ASC), $$54(DESC)]  |LOCAL|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          group by ([$$53 := %0->$$58; $$54 := %0->$$59]) decor ([]) {
+                    aggregate [$$55] <- [function-call: hive:sum(FINAL), Args:[%0->$$57]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- EXTERNAL_GROUP_BY[$$58, $$59]  |PARTITIONED|
+            exchange 
+            -- HASH_PARTITION_EXCHANGE [$$58, $$59]  |PARTITIONED|
+              group by ([$$58 := %0->$$48; $$59 := %0->$$51]) decor ([]) {
+                        aggregate [$$57] <- [function-call: hive:sum(PARTIAL1), Args:[%0->$$52]]
+                        -- AGGREGATE  |LOCAL|
+                          nested tuple source
+                          -- NESTED_TUPLE_SOURCE  |LOCAL|
+                     }
+              -- EXTERNAL_GROUP_BY[$$48, $$51]  |LOCAL|
+                exchange 
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  project ([$$48, $$51, $$52])
+                  -- STREAM_PROJECT  |PARTITIONED|
+                    assign [$$51, $$52] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFYear, Args:[%0->$$5], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$29, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$30]], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$22, %0->$$28]]]
+                    -- ASSIGN  |PARTITIONED|
+                      project ([$$5, $$29, $$30, $$28, $$48, $$22])
+                      -- STREAM_PROJECT  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          join (function-call: algebricks:eq, Args:[%0->$$24, %0->$$1])
+                          -- HYBRID_HASH_JOIN [$$24][$$1]  |PARTITIONED|
+                            exchange 
+                            -- HASH_PARTITION_EXCHANGE [$$24]  |PARTITIONED|
+                              project ([$$29, $$30, $$28, $$24, $$48, $$22])
+                              -- STREAM_PROJECT  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  join (function-call: algebricks:eq, Args:[%0->$$25, %0->$$10])
+                                  -- HYBRID_HASH_JOIN [$$25][$$10]  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$25]  |PARTITIONED|
+                                      project ([$$22, $$29, $$30, $$28, $$25, $$24, $$48])
+                                      -- STREAM_PROJECT  |PARTITIONED|
+                                        exchange 
+                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                          join (function-call: algebricks:and, Args:[function-call: algebricks:eq, Args:[%0->$$26, %0->$$20], function-call: algebricks:eq, Args:[%0->$$25, %0->$$19]])
+                                          -- HYBRID_HASH_JOIN [$$26, $$25][$$20, $$19]  |PARTITIONED|
+                                            exchange 
+                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                              project ([$$48, $$24, $$25, $$26, $$28, $$29, $$30])
+                                              -- STREAM_PROJECT  |PARTITIONED|
+                                                exchange 
+                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                  join (function-call: algebricks:eq, Args:[%0->$$26, %0->$$40])
+                                                  -- HYBRID_HASH_JOIN [$$26][$$40]  |PARTITIONED|
+                                                    exchange 
+                                                    -- HASH_PARTITION_EXCHANGE [$$26]  |PARTITIONED|
+                                                      data-scan [$$26, $$24, $$25, $$28, $$29, $$30]<-[$$24, $$25, $$26, $$27, $$28, $$29, $$30, $$31, $$32, $$33, $$34, $$35, $$36, $$37, $$38, $$39] <- default.lineitem
+                                                      -- DATASOURCE_SCAN  |PARTITIONED|
+                                                        exchange 
+                                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                          empty-tuple-source
+                                                          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                    exchange 
+                                                    -- HASH_PARTITION_EXCHANGE [$$40]  |PARTITIONED|
+                                                      project ([$$40, $$48])
+                                                      -- STREAM_PROJECT  |PARTITIONED|
+                                                        exchange 
+                                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                          join (function-call: algebricks:eq, Args:[%0->$$43, %0->$$47])
+                                                          -- HYBRID_HASH_JOIN [$$43][$$47]  |PARTITIONED|
+                                                            exchange 
+                                                            -- HASH_PARTITION_EXCHANGE [$$43]  |PARTITIONED|
+                                                              data-scan [$$43, $$40]<-[$$40, $$41, $$42, $$43, $$44, $$45, $$46] <- default.supplier
+                                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                exchange 
+                                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                  empty-tuple-source
+                                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                                            exchange 
+                                                            -- HASH_PARTITION_EXCHANGE [$$47]  |PARTITIONED|
+                                                              data-scan [$$47, $$48]<-[$$47, $$48, $$49, $$50] <- default.nation
+                                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                                exchange 
+                                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                                  empty-tuple-source
+                                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                            exchange 
+                                            -- HASH_PARTITION_EXCHANGE [$$20]  |PARTITIONED|
+                                              data-scan [$$20, $$19, $$22]<-[$$19, $$20, $$21, $$22, $$23] <- default.partsupp
+                                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                                exchange 
+                                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                  empty-tuple-source
+                                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                    exchange 
+                                    -- HASH_PARTITION_EXCHANGE [$$10]  |PARTITIONED|
+                                      project ([$$10])
+                                      -- STREAM_PROJECT  |PARTITIONED|
+                                        select (function-call: hive:org.apache.hadoop.hive.ql.udf.UDFLike, Args:[%0->$$11, %green%])
+                                        -- STREAM_SELECT  |PARTITIONED|
+                                          exchange 
+                                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                            data-scan [$$10, $$11]<-[$$10, $$11, $$12, $$13, $$14, $$15, $$16, $$17, $$18] <- default.part
+                                            -- DATASOURCE_SCAN  |PARTITIONED|
+                                              exchange 
+                                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                                empty-tuple-source
+                                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                            exchange 
+                            -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+                              data-scan [$$1, $$5]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.orders
+                              -- DATASOURCE_SCAN  |PARTITIONED|
+                                exchange 
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  empty-tuple-source
+                                  -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u10_nestedloop_join.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u10_nestedloop_join.plan
new file mode 100644
index 0000000..c86d57f
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u10_nestedloop_join.plan
@@ -0,0 +1,24 @@
+write [%0->$$6, %0->$$2, %0->$$5, %0->$$1]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$6, $$2, $$5, $$1])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      join (function-call: algebricks:and, Args:[function-call: algebricks:gt, Args:[%0->$$5, %0->$$1], true])
+      -- NESTED_LOOP  |PARTITIONED|
+        exchange 
+        -- BROADCAST_EXCHANGE  |PARTITIONED|
+          data-scan [$$1, $$2]<-[$$1, $$2, $$3, $$4] <- default.nation
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          data-scan [$$5, $$6]<-[$$5, $$6, $$7, $$8] <- default.nation
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u1_group_by.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u1_group_by.plan
new file mode 100644
index 0000000..188aa6d
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u1_group_by.plan
@@ -0,0 +1,36 @@
+write [%0->$$18, %0->$$19]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$18, $$19])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      group by ([$$17 := %0->$$23]) decor ([]) {
+                aggregate [$$18, $$19] <- [function-call: hive:sum(FINAL), Args:[%0->$$21], function-call: hive:sum(FINAL), Args:[%0->$$22]]
+                -- AGGREGATE  |LOCAL|
+                  nested tuple source
+                  -- NESTED_TUPLE_SOURCE  |LOCAL|
+             }
+      -- EXTERNAL_GROUP_BY[$$23]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$23]  |PARTITIONED|
+          group by ([$$23 := %0->$$9]) decor ([]) {
+                    aggregate [$$21, $$22] <- [function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFAbs, Args:[%0->$$5]], function-call: hive:sum(PARTIAL1), Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$6, function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMinus, Args:[1, %0->$$7]], function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPPlus, Args:[1, %0->$$8]]]]
+                    -- AGGREGATE  |LOCAL|
+                      nested tuple source
+                      -- NESTED_TUPLE_SOURCE  |LOCAL|
+                 }
+          -- EXTERNAL_GROUP_BY[$$9]  |LOCAL|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              project ([$$9, $$5, $$6, $$7, $$8])
+              -- STREAM_PROJECT  |PARTITIONED|
+                select (function-call: algebricks:le, Args:[%0->$$11, 1998-09-02])
+                -- STREAM_SELECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    data-scan [$$5, $$6, $$7, $$8, $$9, $$11]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+                    -- DATASOURCE_SCAN  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        empty-tuple-source
+                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u2_select-project.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u2_select-project.plan
new file mode 100644
index 0000000..4485b36
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u2_select-project.plan
@@ -0,0 +1,16 @@
+write [%0->$$8, %0->$$3, %0->$$4, %0->$$2]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$8, $$3, $$4, $$2])
+  -- STREAM_PROJECT  |PARTITIONED|
+    assign [$$8] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[2, %0->$$1]]
+    -- ASSIGN  |PARTITIONED|
+      select (function-call: algebricks:lt, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$1, 2], 20])
+      -- STREAM_SELECT  |PARTITIONED|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          data-scan []<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7] <- default.supplier
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u3_union.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u3_union.plan
new file mode 100644
index 0000000..c4040f2
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u3_union.plan
@@ -0,0 +1,38 @@
+write [%0->$$17, %0->$$18, %0->$$19, %0->$$20]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    union ($$8, $$16, $$17) ($$3, $$11, $$18) ($$4, $$12, $$19) ($$2, $$10, $$20)
+    -- UNION_ALL  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        project ([$$8, $$3, $$4, $$2])
+        -- STREAM_PROJECT  |PARTITIONED|
+          assign [$$8] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[2, %0->$$1]]
+          -- ASSIGN  |PARTITIONED|
+            select (function-call: algebricks:gt, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$1, 2], 50])
+            -- STREAM_SELECT  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                data-scan []<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7] <- default.supplier
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        project ([$$16, $$11, $$12, $$10])
+        -- STREAM_PROJECT  |PARTITIONED|
+          assign [$$16] <- [function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[2, %0->$$9]]
+          -- ASSIGN  |PARTITIONED|
+            select (function-call: algebricks:lt, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$9, 2], 20])
+            -- STREAM_SELECT  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                data-scan []<-[$$9, $$10, $$11, $$12, $$13, $$14, $$15] <- default.supplier
+                -- DATASOURCE_SCAN  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    empty-tuple-source
+                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u4_join.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u4_join.plan
new file mode 100644
index 0000000..449a601
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u4_join.plan
@@ -0,0 +1,36 @@
+write [%0->$$1, %0->$$2, %0->$$3, %0->$$4]
+-- SINK_WRITE  |PARTITIONED|
+  select (function-call: algebricks:lt, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$1, 2], 20])
+  -- STREAM_SELECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      data-scan [$$1, $$2, $$3, $$4]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7] <- default.supplier
+      -- DATASOURCE_SCAN  |PARTITIONED|
+        exchange 
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          empty-tuple-source
+          -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+write [%0->$$1, %0->$$9, %0->$$10, %0->$$11]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$1, $$9, $$10, $$11])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      join (function-call: algebricks:eq, Args:[%0->$$5, %0->$$1])
+      -- HYBRID_HASH_JOIN [$$5][$$1]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$5]  |PARTITIONED|
+          data-scan [$$5, $$9, $$10, $$11]<-[$$5, $$6, $$7, $$8, $$9, $$10, $$11] <- default.supplier
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+          data-scan [$$1]<-[$$1, $$2, $$3, $$4] <- default.result
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u5_lateral_view.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u5_lateral_view.plan
new file mode 100644
index 0000000..48e624e
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u5_lateral_view.plan
@@ -0,0 +1,14 @@
+write [%0->$$2, %0->$$3, %0->$$8]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$2, $$3, $$8])
+  -- STREAM_PROJECT  |PARTITIONED|
+    unnest $$8 <- function-call: hive:explode, Args:[%0->$$1]
+    -- UNNEST  |PARTITIONED|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        data-scan [$$1, $$2, $$3]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7] <- default.supplier
+        -- DATASOURCE_SCAN  |PARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            empty-tuple-source
+            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u6_limit.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u6_limit.plan
new file mode 100644
index 0000000..b5ed12f
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u6_limit.plan
@@ -0,0 +1,26 @@
+write [%0->$$1, %0->$$4, %0->$$5, %0->$$7]
+-- SINK_WRITE  |UNPARTITIONED|
+  limit 4
+  -- STREAM_LIMIT  |UNPARTITIONED|
+    limit 4
+    -- STREAM_LIMIT  |UNPARTITIONED|
+      exchange 
+      -- SORT_MERGE_EXCHANGE [$$4(ASC) ]  |PARTITIONED|
+        limit 4
+        -- STREAM_LIMIT  |LOCAL|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+            order (ASC, %0->$$4) 
+            -- STABLE_SORT [$$4(ASC)]  |LOCAL|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                select (function-call: algebricks:lt, Args:[%0->$$4, 10000])
+                -- STREAM_SELECT  |PARTITIONED|
+                  exchange 
+                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                    data-scan [$$1, $$4, $$5, $$7]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9] <- default.orders
+                    -- DATASOURCE_SCAN  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        empty-tuple-source
+                        -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u7_multi_join.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u7_multi_join.plan
new file mode 100644
index 0000000..ab55181
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u7_multi_join.plan
@@ -0,0 +1,52 @@
+write [%0->$$4, %0->$$25, %0->$$28, %0->$$29, %0->$$32]
+-- SINK_WRITE  |PARTITIONED|
+  project ([$$4, $$25, $$28, $$29, $$32])
+  -- STREAM_PROJECT  |PARTITIONED|
+    exchange 
+    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+      join (function-call: algebricks:eq, Args:[%0->$$1, %0->$$25])
+      -- HYBRID_HASH_JOIN [$$1][$$25]  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$1]  |PARTITIONED|
+          data-scan [$$1, $$4]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7, $$8, $$9, $$10, $$11, $$12, $$13, $$14, $$15, $$16] <- default.lineitem
+          -- DATASOURCE_SCAN  |PARTITIONED|
+            exchange 
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              empty-tuple-source
+              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+        exchange 
+        -- HASH_PARTITION_EXCHANGE [$$25]  |PARTITIONED|
+          project ([$$32, $$25, $$29, $$28])
+          -- STREAM_PROJECT  |PARTITIONED|
+            project ([$$25, $$17, $$28, $$29, $$32])
+            -- STREAM_PROJECT  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                join (function-call: algebricks:eq, Args:[%0->$$26, %0->$$17])
+                -- HYBRID_HASH_JOIN [$$26][$$17]  |PARTITIONED|
+                  exchange 
+                  -- HASH_PARTITION_EXCHANGE [$$26]  |PARTITIONED|
+                    select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$28, 30000], function-call: algebricks:lt, Args:[%0->$$28, 30000]])
+                    -- STREAM_SELECT  |PARTITIONED|
+                      exchange 
+                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                        data-scan [$$32, $$25, $$26, $$29, $$28]<-[$$25, $$26, $$27, $$28, $$29, $$30, $$31, $$32, $$33] <- default.orders
+                        -- DATASOURCE_SCAN  |PARTITIONED|
+                          exchange 
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            empty-tuple-source
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                  exchange 
+                  -- HASH_PARTITION_EXCHANGE [$$17]  |PARTITIONED|
+                    project ([$$17])
+                    -- STREAM_PROJECT  |PARTITIONED|
+                      select (function-call: algebricks:and, Args:[function-call: algebricks:lt, Args:[%0->$$17, 5], function-call: algebricks:lt, Args:[%0->$$17, 5]])
+                      -- STREAM_SELECT  |PARTITIONED|
+                        exchange 
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          data-scan [$$17]<-[$$17, $$18, $$19, $$20, $$21, $$22, $$23, $$24] <- default.customer
+                          -- DATASOURCE_SCAN  |PARTITIONED|
+                            exchange 
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              empty-tuple-source
+                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u8_non_mapred.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u8_non_mapred.plan
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u8_non_mapred.plan
diff --git a/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u9_order_by.plan b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u9_order_by.plan
new file mode 100644
index 0000000..7370fcf
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/optimizerts/results/u9_order_by.plan
@@ -0,0 +1,18 @@
+write [%0->$$1, %0->$$2, %0->$$3, %0->$$4]
+-- SINK_WRITE  |PARTITIONED|
+  exchange 
+  -- SORT_MERGE_EXCHANGE [$$2(ASC) ]  |PARTITIONED|
+    order (ASC, %0->$$2) 
+    -- STABLE_SORT [$$2(ASC)]  |LOCAL|
+      exchange 
+      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+        select (function-call: algebricks:lt, Args:[function-call: hive:org.apache.hadoop.hive.ql.udf.UDFOPMultiply, Args:[%0->$$1, 2], 20])
+        -- STREAM_SELECT  |PARTITIONED|
+          exchange 
+          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+            data-scan [$$1, $$2, $$3, $$4]<-[$$1, $$2, $$3, $$4, $$5, $$6, $$7] <- default.supplier
+            -- DATASOURCE_SCAN  |PARTITIONED|
+              exchange 
+              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                empty-tuple-source
+                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/conf/cluster b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/conf/cluster
new file mode 100644
index 0000000..ee81dc1
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/conf/cluster
@@ -0,0 +1,3 @@
+2
+127.0.0.1 nc0
+127.0.0.1 nc1
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/customer.tbl b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/customer.tbl
new file mode 100644
index 0000000..5d39c80
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/customer.tbl
@@ -0,0 +1,150 @@
+1|Customer#000000001|IVhzIApeRb ot,c,E|15|25-989-741-2988|711.56|BUILDING|to the even, regular platelets. regular, ironic epitaphs nag e|
+2|Customer#000000002|XSTf4,NCwDVaWNe6tEgvwfmRchLXak|13|23-768-687-3665|121.65|AUTOMOBILE|l accounts. blithely ironic theodolites integrate boldly: caref|
+3|Customer#000000003|MG9kdTD2WBHm|1|11-719-748-3364|7498.12|AUTOMOBILE| deposits eat slyly ironic, even instructions. express foxes detect slyly. blithely even accounts abov|
+4|Customer#000000004|XxVSJsLAGtn|4|14-128-190-5944|2866.83|MACHINERY| requests. final, regular ideas sleep final accou|
+5|Customer#000000005|KvpyuHCplrB84WgAiGV6sYpZq7Tj|3|13-750-942-6364|794.47|HOUSEHOLD|n accounts will have to unwind. foxes cajole accor|
+6|Customer#000000006|sKZz0CsnMD7mp4Xd0YrBvx,LREYKUWAh yVn|20|30-114-968-4951|7638.57|AUTOMOBILE|tions. even deposits boost according to the slyly bold packages. final accounts cajole requests. furious|
+7|Customer#000000007|TcGe5gaZNgVePxU5kRrvXBfkasDTea|18|28-190-982-9759|9561.95|AUTOMOBILE|ainst the ironic, express theodolites. express, even pinto beans among the exp|
+8|Customer#000000008|I0B10bB0AymmC, 0PrRYBCP1yGJ8xcBPmWhl5|17|27-147-574-9335|6819.74|BUILDING|among the slyly regular theodolites kindle blithely courts. carefully even theodolites haggle slyly along the ide|
+9|Customer#000000009|xKiAFTjUsCuxfeleNqefumTrjS|8|18-338-906-3675|8324.07|FURNITURE|r theodolites according to the requests wake thinly excuses: pending requests haggle furiousl|
+10|Customer#000000010|6LrEaV6KR6PLVcgl2ArL Q3rqzLzcT1 v2|5|15-741-346-9870|2753.54|HOUSEHOLD|es regular deposits haggle. fur|
+11|Customer#000000011|PkWS 3HlXqwTuzrKg633BEi|23|33-464-151-3439|-272.60|BUILDING|ckages. requests sleep slyly. quickly even pinto beans promise above the slyly regular pinto beans. |
+12|Customer#000000012|9PWKuhzT4Zr1Q|13|23-791-276-1263|3396.49|HOUSEHOLD| to the carefully final braids. blithely regular requests nag. ironic theodolites boost quickly along|
+13|Customer#000000013|nsXQu0oVjD7PM659uC3SRSp|3|13-761-547-5974|3857.34|BUILDING|ounts sleep carefully after the close frays. carefully bold notornis use ironic requests. blithely|
+14|Customer#000000014|KXkletMlL2JQEA |1|11-845-129-3851|5266.30|FURNITURE|, ironic packages across the unus|
+15|Customer#000000015|YtWggXoOLdwdo7b0y,BZaGUQMLJMX1Y,EC,6Dn|23|33-687-542-7601|2788.52|HOUSEHOLD| platelets. regular deposits detect asymptotes. blithely unusual packages nag slyly at the fluf|
+16|Customer#000000016|cYiaeMLZSMAOQ2 d0W,|10|20-781-609-3107|4681.03|FURNITURE|kly silent courts. thinly regular theodolites sleep fluffily after |
+17|Customer#000000017|izrh 6jdqtp2eqdtbkswDD8SG4SzXruMfIXyR7|2|12-970-682-3487|6.34|AUTOMOBILE|packages wake! blithely even pint|
+18|Customer#000000018|3txGO AiuFux3zT0Z9NYaFRnZt|6|16-155-215-1315|5494.43|BUILDING|s sleep. carefully even instructions nag furiously alongside of t|
+19|Customer#000000019|uc,3bHIx84H,wdrmLOjVsiqXCq2tr|18|28-396-526-5053|8914.71|HOUSEHOLD| nag. furiously careful packages are slyly at the accounts. furiously regular in|
+20|Customer#000000020|JrPk8Pqplj4Ne|22|32-957-234-8742|7603.40|FURNITURE|g alongside of the special excuses-- fluffily enticing packages wake |
+21|Customer#000000021|XYmVpr9yAHDEn|8|18-902-614-8344|1428.25|MACHINERY| quickly final accounts integrate blithely furiously u|
+22|Customer#000000022|QI6p41,FNs5k7RZoCCVPUTkUdYpB|3|13-806-545-9701|591.98|MACHINERY|s nod furiously above the furiously ironic ideas. |
+23|Customer#000000023|OdY W13N7Be3OC5MpgfmcYss0Wn6TKT|3|13-312-472-8245|3332.02|HOUSEHOLD|deposits. special deposits cajole slyly. fluffily special deposits about the furiously |
+24|Customer#000000024|HXAFgIAyjxtdqwimt13Y3OZO 4xeLe7U8PqG|13|23-127-851-8031|9255.67|MACHINERY|into beans. fluffily final ideas haggle fluffily|
+25|Customer#000000025|Hp8GyFQgGHFYSilH5tBfe|12|22-603-468-3533|7133.70|FURNITURE|y. accounts sleep ruthlessly according to the regular theodolites. unusual instructions sleep. ironic, final|
+26|Customer#000000026|8ljrc5ZeMl7UciP|22|32-363-455-4837|5182.05|AUTOMOBILE|c requests use furiously ironic requests. slyly ironic dependencies us|
+27|Customer#000000027|IS8GIyxpBrLpMT0u7|3|13-137-193-2709|5679.84|BUILDING| about the carefully ironic pinto beans. accoun|
+28|Customer#000000028|iVyg0daQ,Tha8x2WPWA9m2529m|8|18-774-241-1462|1007.18|FURNITURE| along the regular deposits. furiously final pac|
+29|Customer#000000029|sJ5adtfyAkCK63df2,vF25zyQMVYE34uh|0|10-773-203-7342|7618.27|FURNITURE|its after the carefully final platelets x-ray against |
+30|Customer#000000030|nJDsELGAavU63Jl0c5NKsKfL8rIJQQkQnYL2QJY|1|11-764-165-5076|9321.01|BUILDING|lithely final requests. furiously unusual account|
+31|Customer#000000031|LUACbO0viaAv6eXOAebryDB xjVst|23|33-197-837-7094|5236.89|HOUSEHOLD|s use among the blithely pending depo|
+32|Customer#000000032|jD2xZzi UmId,DCtNBLXKj9q0Tlp2iQ6ZcO3J|15|25-430-914-2194|3471.53|BUILDING|cial ideas. final, furious requests across the e|
+33|Customer#000000033|qFSlMuLucBmx9xnn5ib2csWUweg D|17|27-375-391-1280|-78.56|AUTOMOBILE|s. slyly regular accounts are furiously. carefully pending requests|
+34|Customer#000000034|Q6G9wZ6dnczmtOx509xgE,M2KV|15|25-344-968-5422|8589.70|HOUSEHOLD|nder against the even, pending accounts. even|
+35|Customer#000000035|TEjWGE4nBzJL2|17|27-566-888-7431|1228.24|HOUSEHOLD|requests. special, express requests nag slyly furiousl|
+36|Customer#000000036|3TvCzjuPzpJ0,DdJ8kW5U|21|31-704-669-5769|4987.27|BUILDING|haggle. enticing, quiet platelets grow quickly bold sheaves. carefully regular acc|
+37|Customer#000000037|7EV4Pwh,3SboctTWt|8|18-385-235-7162|-917.75|FURNITURE|ilent packages are carefully among the deposits. furiousl|
+38|Customer#000000038|a5Ee5e9568R8RLP 2ap7|12|22-306-880-7212|6345.11|HOUSEHOLD|lar excuses. closely even asymptotes cajole blithely excuses. carefully silent pinto beans sleep carefully fin|
+39|Customer#000000039|nnbRg,Pvy33dfkorYE FdeZ60|2|12-387-467-6509|6264.31|AUTOMOBILE|tions. slyly silent excuses slee|
+40|Customer#000000040|gOnGWAyhSV1ofv|3|13-652-915-8939|1335.30|BUILDING|rges impress after the slyly ironic courts. foxes are. blithely |
+41|Customer#000000041|IM9mzmyoxeBmvNw8lA7G3Ydska2nkZF|10|20-917-711-4011|270.95|HOUSEHOLD|ly regular accounts hang bold, silent packages. unusual foxes haggle slyly above the special, final depo|
+42|Customer#000000042|ziSrvyyBke|5|15-416-330-4175|8727.01|BUILDING|ssly according to the pinto beans: carefully special requests across the even, pending accounts wake special|
+43|Customer#000000043|ouSbjHk8lh5fKX3zGso3ZSIj9Aa3PoaFd|19|29-316-665-2897|9904.28|MACHINERY|ial requests: carefully pending foxes detect quickly. carefully final courts cajole quickly. carefully|
+44|Customer#000000044|Oi,dOSPwDu4jo4x,,P85E0dmhZGvNtBwi|16|26-190-260-5375|7315.94|AUTOMOBILE|r requests around the unusual, bold a|
+45|Customer#000000045|4v3OcpFgoOmMG,CbnF,4mdC|9|19-715-298-9917|9983.38|AUTOMOBILE|nto beans haggle slyly alongside of t|
+46|Customer#000000046|eaTXWWm10L9|6|16-357-681-2007|5744.59|AUTOMOBILE|ctions. accounts sleep furiously even requests. regular, regular accounts cajole blithely around the final pa|
+47|Customer#000000047|b0UgocSqEW5 gdVbhNT|2|12-427-271-9466|274.58|BUILDING|ions. express, ironic instructions sleep furiously ironic ideas. furi|
+48|Customer#000000048|0UU iPhBupFvemNB|0|10-508-348-5882|3792.50|BUILDING|re fluffily pending foxes. pending, bold platelets sleep slyly. even platelets cajo|
+49|Customer#000000049|cNgAeX7Fqrdf7HQN9EwjUa4nxT,68L FKAxzl|10|20-908-631-4424|4573.94|FURNITURE|nusual foxes! fluffily pending packages maintain to the regular |
+50|Customer#000000050|9SzDYlkzxByyJ1QeTI o|6|16-658-112-3221|4266.13|MACHINERY|ts. furiously ironic accounts cajole furiously slyly ironic dinos.|
+51|Customer#000000051|uR,wEaiTvo4|12|22-344-885-4251|855.87|FURNITURE|eposits. furiously regular requests integrate carefully packages. furious|
+52|Customer#000000052|7 QOqGqqSy9jfV51BC71jcHJSD0|11|21-186-284-5998|5630.28|HOUSEHOLD|ic platelets use evenly even accounts. stealthy theodolites cajole furiou|
+53|Customer#000000053|HnaxHzTfFTZs8MuCpJyTbZ47Cm4wFOOgib|15|25-168-852-5363|4113.64|HOUSEHOLD|ar accounts are. even foxes are blithely. fluffily pending deposits boost|
+54|Customer#000000054|,k4vf 5vECGWFy,hosTE,|4|14-776-370-4745|868.90|AUTOMOBILE|sual, silent accounts. furiously express accounts cajole special deposits. final, final accounts use furi|
+55|Customer#000000055|zIRBR4KNEl HzaiV3a i9n6elrxzDEh8r8pDom|10|20-180-440-8525|4572.11|MACHINERY|ully unusual packages wake bravely bold packages. unusual requests boost deposits! blithely ironic packages ab|
+56|Customer#000000056|BJYZYJQk4yD5B|10|20-895-685-6920|6530.86|FURNITURE|. notornis wake carefully. carefully fluffy requests are furiously even accounts. slyly expre|
+57|Customer#000000057|97XYbsuOPRXPWU|21|31-835-306-1650|4151.93|AUTOMOBILE|ove the carefully special packages. even, unusual deposits sleep slyly pend|
+58|Customer#000000058|g9ap7Dk1Sv9fcXEWjpMYpBZIRUohi T|13|23-244-493-2508|6478.46|HOUSEHOLD|ideas. ironic ideas affix furiously express, final instructions. regular excuses use quickly e|
+59|Customer#000000059|zLOCP0wh92OtBihgspOGl4|1|11-355-584-3112|3458.60|MACHINERY|ously final packages haggle blithely after the express deposits. furiou|
+60|Customer#000000060|FyodhjwMChsZmUz7Jz0H|12|22-480-575-5866|2741.87|MACHINERY|latelets. blithely unusual courts boost furiously about the packages. blithely final instruct|
+61|Customer#000000061|9kndve4EAJxhg3veF BfXr7AqOsT39o gtqjaYE|17|27-626-559-8599|1536.24|FURNITURE|egular packages shall have to impress along the |
+62|Customer#000000062|upJK2Dnw13,|7|17-361-978-7059|595.61|MACHINERY|kly special dolphins. pinto beans are slyly. quickly regular accounts are furiously a|
+63|Customer#000000063|IXRSpVWWZraKII|21|31-952-552-9584|9331.13|AUTOMOBILE|ithely even accounts detect slyly above the fluffily ir|
+64|Customer#000000064|MbCeGY20kaKK3oalJD,OT|3|13-558-731-7204|-646.64|BUILDING|structions after the quietly ironic theodolites cajole be|
+65|Customer#000000065|RGT yzQ0y4l0H90P783LG4U95bXQFDRXbWa1sl,X|23|33-733-623-5267|8795.16|AUTOMOBILE|y final foxes serve carefully. theodolites are carefully. pending i|
+66|Customer#000000066|XbsEqXH1ETbJYYtA1A|22|32-213-373-5094|242.77|HOUSEHOLD|le slyly accounts. carefully silent packages benea|
+67|Customer#000000067|rfG0cOgtr5W8 xILkwp9fpCS8|9|19-403-114-4356|8166.59|MACHINERY|indle furiously final, even theodo|
+68|Customer#000000068|o8AibcCRkXvQFh8hF,7o|12|22-918-832-2411|6853.37|HOUSEHOLD| pending pinto beans impress realms. final dependencies |
+69|Customer#000000069|Ltx17nO9Wwhtdbe9QZVxNgP98V7xW97uvSH1prEw|9|19-225-978-5670|1709.28|HOUSEHOLD|thely final ideas around the quickly final dependencies affix carefully quickly final theodolites. final accounts c|
+70|Customer#000000070|mFowIuhnHjp2GjCiYYavkW kUwOjIaTCQ|22|32-828-107-2832|4867.52|FURNITURE|fter the special asymptotes. ideas after the unusual frets cajole quickly regular pinto be|
+71|Customer#000000071|TlGalgdXWBmMV,6agLyWYDyIz9MKzcY8gl,w6t1B|7|17-710-812-5403|-611.19|HOUSEHOLD|g courts across the regular, final pinto beans are blithely pending ac|
+72|Customer#000000072|putjlmskxE,zs,HqeIA9Wqu7dhgH5BVCwDwHHcf|2|12-759-144-9689|-362.86|FURNITURE|ithely final foxes sleep always quickly bold accounts. final wat|
+73|Customer#000000073|8IhIxreu4Ug6tt5mog4|0|10-473-439-3214|4288.50|BUILDING|usual, unusual packages sleep busily along the furiou|
+74|Customer#000000074|IkJHCA3ZThF7qL7VKcrU nRLl,kylf |4|14-199-862-7209|2764.43|MACHINERY|onic accounts. blithely slow packages would haggle carefully. qui|
+75|Customer#000000075|Dh 6jZ,cwxWLKQfRKkiGrzv6pm|18|28-247-803-9025|6684.10|AUTOMOBILE| instructions cajole even, even deposits. finally bold deposits use above the even pains. slyl|
+76|Customer#000000076|m3sbCvjMOHyaOofH,e UkGPtqc4|0|10-349-718-3044|5745.33|FURNITURE|pecial deposits. ironic ideas boost blithely according to the closely ironic theodolites! furiously final deposits n|
+77|Customer#000000077|4tAE5KdMFGD4byHtXF92vx|17|27-269-357-4674|1738.87|BUILDING|uffily silent requests. carefully ironic asymptotes among the ironic hockey players are carefully bli|
+78|Customer#000000078|HBOta,ZNqpg3U2cSL0kbrftkPwzX|9|19-960-700-9191|7136.97|FURNITURE|ests. blithely bold pinto beans h|
+79|Customer#000000079|n5hH2ftkVRwW8idtD,BmM2|15|25-147-850-4166|5121.28|MACHINERY|es. packages haggle furiously. regular, special requests poach after the quickly express ideas. blithely pending re|
+80|Customer#000000080|K,vtXp8qYB |0|10-267-172-7101|7383.53|FURNITURE|tect among the dependencies. bold accounts engage closely even pinto beans. ca|
+81|Customer#000000081|SH6lPA7JiiNC6dNTrR|20|30-165-277-3269|2023.71|BUILDING|r packages. fluffily ironic requests cajole fluffily. ironically regular theodolit|
+82|Customer#000000082|zhG3EZbap4c992Gj3bK,3Ne,Xn|18|28-159-442-5305|9468.34|AUTOMOBILE|s wake. bravely regular accounts are furiously. regula|
+83|Customer#000000083|HnhTNB5xpnSF20JBH4Ycs6psVnkC3RDf|22|32-817-154-4122|6463.51|BUILDING|ccording to the quickly bold warhorses. final, regular foxes integrate carefully. bold packages nag blithely ev|
+84|Customer#000000084|lpXz6Fwr9945rnbtMc8PlueilS1WmASr CB|11|21-546-818-3802|5174.71|FURNITURE|ly blithe foxes. special asymptotes haggle blithely against the furiously regular depo|
+85|Customer#000000085|siRerlDwiolhYR 8FgksoezycLj|5|15-745-585-8219|3386.64|FURNITURE|ronic ideas use above the slowly pendin|
+86|Customer#000000086|US6EGGHXbTTXPL9SBsxQJsuvy|0|10-677-951-2353|3306.32|HOUSEHOLD|quests. pending dugouts are carefully aroun|
+87|Customer#000000087|hgGhHVSWQl 6jZ6Ev|23|33-869-884-7053|6327.54|FURNITURE|hely ironic requests integrate according to the ironic accounts. slyly regular pla|
+88|Customer#000000088|wtkjBN9eyrFuENSMmMFlJ3e7jE5KXcg|16|26-516-273-2566|8031.44|AUTOMOBILE|s are quickly above the quickly ironic instructions; even requests about the carefully final deposi|
+89|Customer#000000089|dtR, y9JQWUO6FoJExyp8whOU|14|24-394-451-5404|1530.76|FURNITURE|counts are slyly beyond the slyly final accounts. quickly final ideas wake. r|
+90|Customer#000000090|QxCzH7VxxYUWwfL7|16|26-603-491-1238|7354.23|BUILDING|sly across the furiously even |
+91|Customer#000000091|S8OMYFrpHwoNHaGBeuS6E 6zhHGZiprw1b7 q|8|18-239-400-3677|4643.14|AUTOMOBILE|onic accounts. fluffily silent pinto beans boost blithely according to the fluffily exp|
+92|Customer#000000092|obP PULk2LH LqNF,K9hcbNqnLAkJVsl5xqSrY,|2|12-446-416-8471|1182.91|MACHINERY|. pinto beans hang slyly final deposits. ac|
+93|Customer#000000093|EHXBr2QGdh|7|17-359-388-5266|2182.52|MACHINERY|press deposits. carefully regular platelets r|
+94|Customer#000000094|IfVNIN9KtkScJ9dUjK3Pg5gY1aFeaXewwf|9|19-953-499-8833|5500.11|HOUSEHOLD|latelets across the bold, final requests sleep according to the fluffily bold accounts. unusual deposits amon|
+95|Customer#000000095|EU0xvmWvOmUUn5J,2z85DQyG7QCJ9Xq7|15|25-923-255-2929|5327.38|MACHINERY|ithely. ruthlessly final requests wake slyly alongside of the furiously silent pinto beans. even the|
+96|Customer#000000096|vWLOrmXhRR|8|18-422-845-1202|6323.92|AUTOMOBILE|press requests believe furiously. carefully final instructions snooze carefully. |
+97|Customer#000000097|OApyejbhJG,0Iw3j rd1M|17|27-588-919-5638|2164.48|AUTOMOBILE|haggle slyly. bold, special ideas are blithely above the thinly bold theo|
+98|Customer#000000098|7yiheXNSpuEAwbswDW|12|22-885-845-6889|-551.37|BUILDING|ages. furiously pending accounts are quickly carefully final foxes: busily pe|
+99|Customer#000000099|szsrOiPtCHVS97Lt|15|25-515-237-9232|4088.65|HOUSEHOLD|cajole slyly about the regular theodolites! furiously bold requests nag along the pending, regular packages. somas|
+100|Customer#000000100|fptUABXcmkC5Wx|20|30-749-445-4907|9889.89|FURNITURE|was furiously fluffily quiet deposits. silent, pending requests boost against |
+101|Customer#000000101|sMmL2rNeHDltovSm Y|2|12-514-298-3699|7470.96|MACHINERY| sleep. pending packages detect slyly ironic pack|
+102|Customer#000000102|UAtflJ06 fn9zBfKjInkQZlWtqaA|19|29-324-978-8538|8462.17|BUILDING|ously regular dependencies nag among the furiously express dinos. blithely final|
+103|Customer#000000103|8KIsQX4LJ7QMsj6DrtFtXu0nUEdV,8a|9|19-216-107-2107|2757.45|BUILDING|furiously pending notornis boost slyly around the blithely ironic ideas? final, even instructions cajole fl|
+104|Customer#000000104|9mcCK L7rt0SwiYtrbO88DiZS7U d7M|10|20-966-284-8065|-588.38|FURNITURE|rate carefully slyly special pla|
+105|Customer#000000105|4iSJe4L SPjg7kJj98Yz3z0B|10|20-793-553-6417|9091.82|MACHINERY|l pains cajole even accounts. quietly final instructi|
+106|Customer#000000106|xGCOEAUjUNG|1|11-751-989-4627|3288.42|MACHINERY|lose slyly. ironic accounts along the evenly regular theodolites wake about the special, final gifts. |
+107|Customer#000000107|Zwg64UZ,q7GRqo3zm7P1tZIRshBDz|15|25-336-529-9919|2514.15|AUTOMOBILE|counts cajole slyly. regular requests wake. furiously regular deposits about the blithely final fo|
+108|Customer#000000108|GPoeEvpKo1|5|15-908-619-7526|2259.38|BUILDING|refully ironic deposits sleep. regular, unusual requests wake slyly|
+109|Customer#000000109|OOOkYBgCMzgMQXUmkocoLb56rfrdWp2NE2c|16|26-992-422-8153|-716.10|BUILDING|es. fluffily final dependencies sleep along the blithely even pinto beans. final deposits haggle furiously furiou|
+110|Customer#000000110|mymPfgphaYXNYtk|10|20-893-536-2069|7462.99|AUTOMOBILE|nto beans cajole around the even, final deposits. quickly bold packages according to the furiously regular dept|
+111|Customer#000000111|CBSbPyOWRorloj2TBvrK9qp9tHBs|22|32-582-283-7528|6505.26|MACHINERY|ly unusual instructions detect fluffily special deposits-- theodolites nag carefully during the ironic dependencies|
+112|Customer#000000112|RcfgG3bO7QeCnfjqJT1|19|29-233-262-8382|2953.35|FURNITURE|rmanently unusual multipliers. blithely ruthless deposits are furiously along the|
+113|Customer#000000113|eaOl5UBXIvdY57rglaIzqvfPD,MYfK|12|22-302-930-4756|2912.00|BUILDING|usly regular theodolites boost furiously doggedly pending instructio|
+114|Customer#000000114|xAt 5f5AlFIU|14|24-805-212-7646|1027.46|FURNITURE|der the carefully express theodolites are after the packages. packages are. bli|
+115|Customer#000000115|0WFt1IXENmUT2BgbsB0ShVKJZt0HCBCbFl0aHc|8|18-971-699-1843|7508.92|HOUSEHOLD|sits haggle above the carefully ironic theodolite|
+116|Customer#000000116|yCuVxIgsZ3,qyK2rloThy3u|16|26-632-309-5792|8403.99|BUILDING|as. quickly final sauternes haggle slyly carefully even packages. brave, ironic pinto beans are above the furious|
+117|Customer#000000117|uNhM,PzsRA3S,5Y Ge5Npuhi|24|34-403-631-3505|3950.83|FURNITURE|affix. instructions are furiously sl|
+118|Customer#000000118|OVnFuHygK9wx3xpg8|18|28-639-943-7051|3582.37|AUTOMOBILE|uick packages alongside of the furiously final deposits haggle above the fluffily even foxes. blithely dogged dep|
+119|Customer#000000119|M1ETOIecuvH8DtM0Y0nryXfW|7|17-697-919-8406|3930.35|FURNITURE|express ideas. blithely ironic foxes thrash. special acco|
+120|Customer#000000120|zBNna00AEInqyO1|12|22-291-534-1571|363.75|MACHINERY| quickly. slyly ironic requests cajole blithely furiously final dependen|
+121|Customer#000000121|tv nCR2YKupGN73mQudO|17|27-411-990-2959|6428.32|BUILDING|uriously stealthy ideas. carefully final courts use carefully|
+122|Customer#000000122|yp5slqoNd26lAENZW3a67wSfXA6hTF|3|13-702-694-4520|7865.46|HOUSEHOLD| the special packages hinder blithely around the permanent requests. bold depos|
+123|Customer#000000123|YsOnaaER8MkvK5cpf4VSlq|5|15-817-151-1168|5897.83|BUILDING|ependencies. regular, ironic requests are fluffily regu|
+124|Customer#000000124|aTbyVAW5tCd,v09O|18|28-183-750-7809|1842.49|AUTOMOBILE|le fluffily even dependencies. quietly s|
+125|Customer#000000125|,wSZXdVR xxIIfm9s8ITyLl3kgjT6UC07GY0Y|19|29-261-996-3120|-234.12|FURNITURE|x-ray finally after the packages? regular requests c|
+126|Customer#000000126|ha4EHmbx3kg DYCsP6DFeUOmavtQlHhcfaqr|22|32-755-914-7592|1001.39|HOUSEHOLD|s about the even instructions boost carefully furiously ironic pearls. ruthless, |
+127|Customer#000000127|Xyge4DX2rXKxXyye1Z47LeLVEYMLf4Bfcj|21|31-101-672-2951|9280.71|MACHINERY|ic, unusual theodolites nod silently after the final, ironic instructions: pending r|
+128|Customer#000000128|AmKUMlJf2NRHcKGmKjLS|4|14-280-874-8044|-986.96|HOUSEHOLD|ing packages integrate across the slyly unusual dugouts. blithely silent ideas sublate carefully. blithely expr|
+129|Customer#000000129|q7m7rbMM0BpaCdmxloCgBDRCleXsXkdD8kf|7|17-415-148-7416|9127.27|HOUSEHOLD| unusual deposits boost carefully furiously silent ideas. pending accounts cajole slyly across|
+130|Customer#000000130|RKPx2OfZy0Vn 8wGWZ7F2EAvmMORl1k8iH|9|19-190-993-9281|5073.58|HOUSEHOLD|ix slowly. express packages along the furiously ironic requests integrate daringly deposits. fur|
+131|Customer#000000131|jyN6lAjb1FtH10rMC,XzlWyCBrg75|11|21-840-210-3572|8595.53|HOUSEHOLD|jole special packages. furiously final dependencies about the furiously speci|
+132|Customer#000000132|QM5YabAsTLp9|4|14-692-150-9717|162.57|HOUSEHOLD|uickly carefully special theodolites. carefully regular requests against the blithely unusual instructions |
+133|Customer#000000133|IMCuXdpIvdkYO92kgDGuyHgojcUs88p|17|27-408-997-8430|2314.67|AUTOMOBILE|t packages. express pinto beans are blithely along the unusual, even theodolites. silent packages use fu|
+134|Customer#000000134|sUiZ78QCkTQPICKpA9OBzkUp2FM|11|21-200-159-5932|4608.90|BUILDING|yly fluffy foxes boost final ideas. b|
+135|Customer#000000135|oZK,oC0 fdEpqUML|19|29-399-293-6241|8732.91|FURNITURE| the slyly final accounts. deposits cajole carefully. carefully sly packag|
+136|Customer#000000136|QoLsJ0v5C1IQbh,DS1|7|17-501-210-4726|-842.39|FURNITURE|ackages sleep ironic, final courts. even requests above the blithely bold requests g|
+137|Customer#000000137|cdW91p92rlAEHgJafqYyxf1Q|16|26-777-409-5654|7838.30|HOUSEHOLD|carefully regular theodolites use. silent dolphins cajo|
+138|Customer#000000138|5uyLAeY7HIGZqtu66Yn08f|5|15-394-860-4589|430.59|MACHINERY|ts doze on the busy ideas. regular|
+139|Customer#000000139|3ElvBwudHKL02732YexGVFVt |9|19-140-352-1403|7897.78|MACHINERY|nstructions. quickly ironic ideas are carefully. bold, |
+140|Customer#000000140|XRqEPiKgcETII,iOLDZp5jA|4|14-273-885-6505|9963.15|MACHINERY|ies detect slyly ironic accounts. slyly ironic theodolites hag|
+141|Customer#000000141|5IW,WROVnikc3l7DwiUDGQNGsLBGOL6Dc0|1|11-936-295-6204|6706.14|FURNITURE|packages nag furiously. carefully unusual accounts snooze according to the fluffily regular pinto beans. slyly spec|
+142|Customer#000000142|AnJ5lxtLjioClr2khl9pb8NLxG2,|9|19-407-425-2584|2209.81|AUTOMOBILE|. even, express theodolites upo|
+143|Customer#000000143|681r22uL452zqk 8By7I9o9enQfx0|16|26-314-406-7725|2186.50|MACHINERY|across the blithely unusual requests haggle theodo|
+144|Customer#000000144|VxYZ3ebhgbltnetaGjNC8qCccjYU05 fePLOno8y|1|11-717-379-4478|6417.31|MACHINERY|ges. slyly regular accounts are slyly. bold, idle reque|
+145|Customer#000000145|kQjHmt2kcec cy3hfMh969u|13|23-562-444-8454|9748.93|HOUSEHOLD|ests? express, express instructions use. blithely fina|
+146|Customer#000000146|GdxkdXG9u7iyI1,,y5tq4ZyrcEy|3|13-835-723-3223|3328.68|FURNITURE|ffily regular dinos are slyly unusual requests. slyly specia|
+147|Customer#000000147|6VvIwbVdmcsMzuu,C84GtBWPaipGfi7DV|18|28-803-187-4335|8071.40|AUTOMOBILE|ress packages above the blithely regular packages sleep fluffily blithely ironic accounts. |
+148|Customer#000000148|BhSPlEWGvIJyT9swk vCWE|11|21-562-498-6636|2135.60|HOUSEHOLD|ing to the carefully ironic requests. carefully regular dependencies about the theodolites wake furious|
+149|Customer#000000149|3byTHCp2mNLPigUrrq|19|29-797-439-6760|8959.65|AUTOMOBILE|al instructions haggle against the slyly bold w|
+150|Customer#000000150|zeoGShTjCwGPplOWFkLURrh41O0AZ8dwNEEN4 |18|28-328-564-7630|3849.48|MACHINERY|ole blithely among the furiously pending packages. furiously bold ideas wake fluffily ironic idea|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/large_card_join_src.tbl b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/large_card_join_src.tbl
new file mode 100644
index 0000000..0168ae9
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/large_card_join_src.tbl
@@ -0,0 +1,1030 @@
+0
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+1
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+2
+3
+3
+3
+4
+4
\ No newline at end of file
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/large_card_join_src_small.tbl b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/large_card_join_src_small.tbl
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/large_card_join_src_small.tbl
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/lineitem.tbl b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/lineitem.tbl
new file mode 100644
index 0000000..58d47c6
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/lineitem.tbl
@@ -0,0 +1,6005 @@
+1|156|4|1|17|17954.55|0.04|0.02|N|O|1996-03-13|1996-02-12|1996-03-22|DELIVER IN PERSON|TRUCK|egular courts above the|
+1|68|9|2|36|34850.16|0.09|0.06|N|O|1996-04-12|1996-02-28|1996-04-20|TAKE BACK RETURN|MAIL|ly final dependencies: slyly bold |
+1|64|5|3|8|7712.48|0.10|0.02|N|O|1996-01-29|1996-03-05|1996-01-31|TAKE BACK RETURN|REG AIR|riously. regular, express dep|
+1|3|6|4|28|25284.00|0.09|0.06|N|O|1996-04-21|1996-03-30|1996-05-16|NONE|AIR|lites. fluffily even de|
+1|25|8|5|24|22200.48|0.10|0.04|N|O|1996-03-30|1996-03-14|1996-04-01|NONE|FOB| pending foxes. slyly re|
+1|16|3|6|32|29312.32|0.07|0.02|N|O|1996-01-30|1996-02-07|1996-02-03|DELIVER IN PERSON|MAIL|arefully slyly ex|
+2|107|2|1|38|38269.80|0.00|0.05|N|O|1997-01-28|1997-01-14|1997-02-02|TAKE BACK RETURN|RAIL|ven requests. deposits breach a|
+3|5|2|1|45|40725.00|0.06|0.00|R|F|1994-02-02|1994-01-04|1994-02-23|NONE|AIR|ongside of the furiously brave acco|
+3|20|10|2|49|45080.98|0.10|0.00|R|F|1993-11-09|1993-12-20|1993-11-24|TAKE BACK RETURN|RAIL| unusual accounts. eve|
+3|129|8|3|27|27786.24|0.06|0.07|A|F|1994-01-16|1993-11-22|1994-01-23|DELIVER IN PERSON|SHIP|nal foxes wake. |
+3|30|5|4|2|1860.06|0.01|0.06|A|F|1993-12-04|1994-01-07|1994-01-01|NONE|TRUCK|y. fluffily pending d|
+3|184|5|5|28|30357.04|0.04|0.00|R|F|1993-12-14|1994-01-10|1994-01-01|TAKE BACK RETURN|FOB|ages nag slyly pending|
+3|63|8|6|26|25039.56|0.10|0.02|A|F|1993-10-29|1993-12-18|1993-11-04|TAKE BACK RETURN|RAIL|ges sleep after the caref|
+4|89|10|1|30|29672.40|0.03|0.08|N|O|1996-01-10|1995-12-14|1996-01-18|DELIVER IN PERSON|REG AIR|- quickly regular packages sleep. idly|
+5|109|10|1|15|15136.50|0.02|0.04|R|F|1994-10-31|1994-08-31|1994-11-20|NONE|AIR|ts wake furiously |
+5|124|5|2|26|26627.12|0.07|0.08|R|F|1994-10-16|1994-09-25|1994-10-19|NONE|FOB|sts use slyly quickly special instruc|
+5|38|4|3|50|46901.50|0.08|0.03|A|F|1994-08-08|1994-10-13|1994-08-26|DELIVER IN PERSON|AIR|eodolites. fluffily unusual|
+6|140|6|1|37|38485.18|0.08|0.03|A|F|1992-04-27|1992-05-15|1992-05-02|TAKE BACK RETURN|TRUCK|p furiously special foxes|
+7|183|4|1|12|12998.16|0.07|0.03|N|O|1996-05-07|1996-03-13|1996-06-03|TAKE BACK RETURN|FOB|ss pinto beans wake against th|
+7|146|3|2|9|9415.26|0.08|0.08|N|O|1996-02-01|1996-03-02|1996-02-19|TAKE BACK RETURN|SHIP|es. instructions|
+7|95|8|3|46|45774.14|0.10|0.07|N|O|1996-01-15|1996-03-27|1996-02-03|COLLECT COD|MAIL| unusual reques|
+7|164|5|4|28|29796.48|0.03|0.04|N|O|1996-03-21|1996-04-08|1996-04-20|NONE|FOB|. slyly special requests haggl|
+7|152|4|5|38|39981.70|0.08|0.01|N|O|1996-02-11|1996-02-24|1996-02-18|DELIVER IN PERSON|TRUCK|ns haggle carefully ironic deposits. bl|
+7|80|10|6|35|34302.80|0.06|0.03|N|O|1996-01-16|1996-02-23|1996-01-22|TAKE BACK RETURN|FOB|jole. excuses wake carefully alongside of |
+7|158|3|7|5|5290.75|0.04|0.02|N|O|1996-02-10|1996-03-26|1996-02-13|NONE|FOB|ithely regula|
+32|83|4|1|28|27526.24|0.05|0.08|N|O|1995-10-23|1995-08-27|1995-10-26|TAKE BACK RETURN|TRUCK|sleep quickly. req|
+32|198|10|2|32|35142.08|0.02|0.00|N|O|1995-08-14|1995-10-07|1995-08-27|COLLECT COD|AIR|lithely regular deposits. fluffily |
+32|45|2|3|2|1890.08|0.09|0.02|N|O|1995-08-07|1995-10-07|1995-08-23|DELIVER IN PERSON|AIR| express accounts wake according to the|
+32|3|8|4|4|3612.00|0.09|0.03|N|O|1995-08-04|1995-10-01|1995-09-03|NONE|REG AIR|e slyly final pac|
+32|86|7|5|44|43387.52|0.05|0.06|N|O|1995-08-28|1995-08-20|1995-09-14|DELIVER IN PERSON|AIR|symptotes nag according to the ironic depo|
+32|12|6|6|6|5472.06|0.04|0.03|N|O|1995-07-21|1995-09-23|1995-07-25|COLLECT COD|RAIL| gifts cajole carefully.|
+33|62|7|1|31|29823.86|0.09|0.04|A|F|1993-10-29|1993-12-19|1993-11-08|COLLECT COD|TRUCK|ng to the furiously ironic package|
+33|61|8|2|32|30753.92|0.02|0.05|A|F|1993-12-09|1994-01-04|1993-12-28|COLLECT COD|MAIL|gular theodolites|
+33|138|4|3|5|5190.65|0.05|0.03|A|F|1993-12-09|1993-12-25|1993-12-23|TAKE BACK RETURN|AIR|. stealthily bold exc|
+33|34|5|4|41|38295.23|0.09|0.00|R|F|1993-11-09|1994-01-24|1993-11-11|TAKE BACK RETURN|MAIL|unusual packages doubt caref|
+34|89|10|1|13|12858.04|0.00|0.07|N|O|1998-10-23|1998-09-14|1998-11-06|NONE|REG AIR|nic accounts. deposits are alon|
+34|90|1|2|22|21781.98|0.08|0.06|N|O|1998-10-09|1998-10-16|1998-10-12|NONE|FOB|thely slyly p|
+34|170|7|3|6|6421.02|0.02|0.06|N|O|1998-10-30|1998-09-20|1998-11-05|NONE|FOB|ar foxes sleep |
+35|1|4|1|24|21624.00|0.02|0.00|N|O|1996-02-21|1996-01-03|1996-03-18|TAKE BACK RETURN|FOB|, regular tithe|
+35|162|1|2|34|36113.44|0.06|0.08|N|O|1996-01-22|1996-01-06|1996-01-27|DELIVER IN PERSON|RAIL|s are carefully against the f|
+35|121|4|3|7|7147.84|0.06|0.04|N|O|1996-01-19|1995-12-22|1996-01-29|NONE|MAIL| the carefully regular |
+35|86|7|4|25|24652.00|0.06|0.05|N|O|1995-11-26|1995-12-25|1995-12-21|DELIVER IN PERSON|SHIP| quickly unti|
+35|120|7|5|34|34684.08|0.08|0.06|N|O|1995-11-08|1996-01-15|1995-11-26|COLLECT COD|MAIL|. silent, unusual deposits boost|
+35|31|7|6|28|26068.84|0.03|0.02|N|O|1996-02-01|1995-12-24|1996-02-28|COLLECT COD|RAIL|ly alongside of |
+36|120|1|1|42|42845.04|0.09|0.00|N|O|1996-02-03|1996-01-21|1996-02-23|COLLECT COD|SHIP| careful courts. special |
+37|23|8|1|40|36920.80|0.09|0.03|A|F|1992-07-21|1992-08-01|1992-08-15|NONE|REG AIR|luffily regular requests. slyly final acco|
+37|127|6|2|39|40057.68|0.05|0.02|A|F|1992-07-02|1992-08-18|1992-07-28|TAKE BACK RETURN|RAIL|the final requests. ca|
+37|13|7|3|43|39259.43|0.05|0.08|A|F|1992-07-10|1992-07-06|1992-08-02|DELIVER IN PERSON|TRUCK|iously ste|
+38|176|5|1|44|47351.48|0.04|0.02|N|O|1996-09-29|1996-11-17|1996-09-30|COLLECT COD|MAIL|s. blithely unusual theodolites am|
+39|3|10|1|44|39732.00|0.09|0.06|N|O|1996-11-14|1996-12-15|1996-12-12|COLLECT COD|RAIL|eodolites. careful|
+39|187|8|2|26|28266.68|0.08|0.04|N|O|1996-11-04|1996-10-20|1996-11-20|NONE|FOB|ckages across the slyly silent|
+39|68|3|3|46|44530.76|0.06|0.08|N|O|1996-09-26|1996-12-19|1996-10-26|DELIVER IN PERSON|AIR|he carefully e|
+39|21|6|4|32|29472.64|0.07|0.05|N|O|1996-10-02|1996-12-19|1996-10-14|COLLECT COD|MAIL|heodolites sleep silently pending foxes. ac|
+39|55|10|5|43|41067.15|0.01|0.01|N|O|1996-10-17|1996-11-14|1996-10-26|COLLECT COD|MAIL|yly regular i|
+39|95|7|6|40|39803.60|0.06|0.05|N|O|1996-12-08|1996-10-22|1997-01-01|COLLECT COD|AIR|quickly ironic fox|
+64|86|7|1|21|20707.68|0.05|0.02|R|F|1994-09-30|1994-09-18|1994-10-26|DELIVER IN PERSON|REG AIR|ch slyly final, thin platelets.|
+65|60|5|1|26|24961.56|0.03|0.03|A|F|1995-04-20|1995-04-25|1995-05-13|NONE|TRUCK|pending deposits nag even packages. ca|
+65|74|3|2|22|21429.54|0.00|0.05|N|O|1995-07-17|1995-06-04|1995-07-19|COLLECT COD|FOB| ideas. special, r|
+65|2|5|3|21|18942.00|0.09|0.07|N|O|1995-07-06|1995-05-14|1995-07-31|DELIVER IN PERSON|RAIL|bove the even packages. accounts nag carefu|
+66|116|10|1|31|31499.41|0.00|0.08|R|F|1994-02-19|1994-03-11|1994-02-20|TAKE BACK RETURN|RAIL|ut the unusual accounts sleep at the bo|
+66|174|5|2|41|44040.97|0.04|0.07|A|F|1994-02-21|1994-03-01|1994-03-18|COLLECT COD|AIR| regular de|
+67|22|5|1|4|3688.08|0.09|0.04|N|O|1997-04-17|1997-01-31|1997-04-20|NONE|SHIP| cajole thinly expres|
+67|21|10|2|12|11052.24|0.09|0.05|N|O|1997-01-27|1997-02-21|1997-02-22|NONE|REG AIR| even packages cajole|
+67|174|4|3|5|5370.85|0.03|0.07|N|O|1997-02-20|1997-02-12|1997-02-21|DELIVER IN PERSON|TRUCK|y unusual packages thrash pinto |
+67|88|9|4|44|43475.52|0.08|0.06|N|O|1997-03-18|1997-01-29|1997-04-13|DELIVER IN PERSON|RAIL|se quickly above the even, express reques|
+67|41|10|5|23|21643.92|0.05|0.07|N|O|1997-04-19|1997-02-14|1997-05-06|DELIVER IN PERSON|REG AIR|ly regular deposit|
+67|179|9|6|29|31295.93|0.02|0.05|N|O|1997-01-25|1997-01-27|1997-01-27|DELIVER IN PERSON|FOB|ultipliers |
+68|8|1|1|3|2724.00|0.05|0.02|N|O|1998-07-04|1998-06-05|1998-07-21|NONE|RAIL|fully special instructions cajole. furious|
+68|176|4|2|46|49503.82|0.02|0.05|N|O|1998-06-26|1998-06-07|1998-07-05|NONE|MAIL| requests are unusual, regular pinto |
+68|35|1|3|46|43011.38|0.04|0.05|N|O|1998-08-13|1998-07-08|1998-08-29|NONE|RAIL|egular dependencies affix ironically along |
+68|95|9|4|20|19901.80|0.07|0.01|N|O|1998-06-27|1998-05-23|1998-07-02|NONE|REG AIR| excuses integrate fluffily |
+68|83|4|5|27|26543.16|0.03|0.06|N|O|1998-06-19|1998-06-25|1998-06-29|DELIVER IN PERSON|SHIP|ccounts. deposits use. furiously|
+68|103|6|6|30|30093.00|0.05|0.06|N|O|1998-08-11|1998-07-11|1998-08-14|NONE|RAIL|oxes are slyly blithely fin|
+68|140|6|7|41|42645.74|0.09|0.08|N|O|1998-06-24|1998-06-27|1998-07-06|NONE|SHIP|eposits nag special ideas. furiousl|
+69|116|10|1|48|48773.28|0.01|0.07|A|F|1994-08-17|1994-08-11|1994-09-08|NONE|TRUCK|regular epitaphs. carefully even ideas hag|
+69|105|10|2|32|32163.20|0.08|0.06|A|F|1994-08-24|1994-08-17|1994-08-31|NONE|REG AIR|s sleep carefully bold, |
+69|138|4|3|17|17648.21|0.09|0.00|A|F|1994-07-02|1994-07-07|1994-07-03|TAKE BACK RETURN|AIR|final, pending instr|
+69|38|9|4|3|2814.09|0.09|0.04|R|F|1994-06-06|1994-07-27|1994-06-15|NONE|MAIL| blithely final d|
+69|93|6|5|42|41709.78|0.07|0.04|R|F|1994-07-31|1994-07-26|1994-08-28|DELIVER IN PERSON|REG AIR|tect regular, speci|
+69|19|3|6|23|21137.23|0.05|0.00|A|F|1994-10-03|1994-08-06|1994-10-24|NONE|SHIP|nding accounts ca|
+70|65|2|1|8|7720.48|0.03|0.08|R|F|1994-01-12|1994-02-27|1994-01-14|TAKE BACK RETURN|FOB|ggle. carefully pending dependenc|
+70|197|10|2|13|14263.47|0.06|0.06|A|F|1994-03-03|1994-02-13|1994-03-26|COLLECT COD|AIR|lyly special packag|
+70|180|8|3|1|1080.18|0.03|0.05|R|F|1994-01-26|1994-03-05|1994-01-28|TAKE BACK RETURN|RAIL|quickly. fluffily unusual theodolites c|
+70|46|9|4|11|10406.44|0.01|0.05|A|F|1994-03-17|1994-03-17|1994-03-27|NONE|MAIL|alongside of the deposits. fur|
+70|38|9|5|37|34707.11|0.09|0.04|R|F|1994-02-13|1994-03-16|1994-02-21|COLLECT COD|MAIL|n accounts are. q|
+70|56|8|6|19|18164.95|0.06|0.03|A|F|1994-01-26|1994-02-17|1994-02-06|TAKE BACK RETURN|SHIP| packages wake pending accounts.|
+71|62|3|1|25|24051.50|0.09|0.07|N|O|1998-04-10|1998-04-22|1998-04-11|COLLECT COD|FOB|ckly. slyly|
+71|66|1|2|3|2898.18|0.09|0.07|N|O|1998-05-23|1998-04-03|1998-06-02|COLLECT COD|SHIP|y. pinto beans haggle after the|
+71|35|1|3|45|42076.35|0.00|0.07|N|O|1998-02-23|1998-03-20|1998-03-24|DELIVER IN PERSON|SHIP| ironic packages believe blithely a|
+71|97|9|4|33|32903.97|0.00|0.01|N|O|1998-04-12|1998-03-20|1998-04-15|NONE|FOB| serve quickly fluffily bold deposi|
+71|104|7|5|39|39159.90|0.08|0.06|N|O|1998-01-29|1998-04-07|1998-02-18|DELIVER IN PERSON|RAIL|l accounts sleep across the pack|
+71|196|9|6|34|37270.46|0.04|0.01|N|O|1998-03-05|1998-04-22|1998-03-30|DELIVER IN PERSON|TRUCK|s cajole. |
+96|124|7|1|23|23554.76|0.10|0.06|A|F|1994-07-19|1994-06-29|1994-07-25|DELIVER IN PERSON|TRUCK|ep-- carefully reg|
+96|136|7|2|30|31083.90|0.01|0.06|R|F|1994-06-03|1994-05-29|1994-06-22|DELIVER IN PERSON|TRUCK|e quickly even ideas. furiou|
+97|120|4|1|13|13261.56|0.00|0.02|R|F|1993-04-01|1993-04-04|1993-04-08|NONE|TRUCK|ayers cajole against the furiously|
+97|50|7|2|37|35151.85|0.02|0.06|A|F|1993-04-13|1993-03-30|1993-04-14|DELIVER IN PERSON|SHIP|ic requests boost carefully quic|
+97|78|6|3|19|18583.33|0.06|0.08|R|F|1993-05-14|1993-03-05|1993-05-25|TAKE BACK RETURN|RAIL|gifts. furiously ironic packages cajole. |
+98|41|2|1|28|26349.12|0.06|0.07|A|F|1994-12-24|1994-10-25|1995-01-16|COLLECT COD|REG AIR| pending, regular accounts s|
+98|110|7|2|1|1010.11|0.00|0.00|A|F|1994-12-01|1994-12-12|1994-12-15|DELIVER IN PERSON|TRUCK|. unusual instructions against|
+98|45|6|3|14|13230.56|0.05|0.02|A|F|1994-12-30|1994-11-22|1995-01-27|COLLECT COD|AIR| cajole furiously. blithely ironic ideas |
+98|168|9|4|10|10681.60|0.03|0.03|A|F|1994-10-23|1994-11-08|1994-11-09|COLLECT COD|RAIL| carefully. quickly ironic ideas|
+99|88|9|1|10|9880.80|0.02|0.01|A|F|1994-05-18|1994-06-03|1994-05-23|COLLECT COD|RAIL|kages. requ|
+99|124|5|2|5|5120.60|0.02|0.07|R|F|1994-05-06|1994-05-28|1994-05-20|TAKE BACK RETURN|RAIL|ests cajole fluffily waters. blithe|
+99|135|1|3|42|43475.46|0.02|0.02|A|F|1994-04-19|1994-05-18|1994-04-20|NONE|RAIL|kages are fluffily furiously ir|
+99|109|2|4|36|36327.60|0.09|0.02|A|F|1994-07-04|1994-04-17|1994-07-30|DELIVER IN PERSON|AIR|slyly. slyly e|
+100|63|4|1|28|26965.68|0.04|0.05|N|O|1998-05-08|1998-05-13|1998-06-07|COLLECT COD|TRUCK|sts haggle. slowl|
+100|116|10|2|22|22354.42|0.00|0.07|N|O|1998-06-24|1998-04-12|1998-06-29|DELIVER IN PERSON|SHIP|nto beans alongside of the fi|
+100|47|4|3|46|43563.84|0.03|0.04|N|O|1998-05-02|1998-04-10|1998-05-22|TAKE BACK RETURN|SHIP|ular accounts. even|
+100|39|10|4|14|13146.42|0.06|0.03|N|O|1998-05-22|1998-05-01|1998-06-03|COLLECT COD|MAIL|y. furiously ironic ideas gr|
+100|54|6|5|37|35299.85|0.05|0.00|N|O|1998-03-06|1998-04-16|1998-03-31|TAKE BACK RETURN|TRUCK|nd the quickly s|
+101|119|9|1|49|49936.39|0.10|0.00|N|O|1996-06-21|1996-05-27|1996-06-29|DELIVER IN PERSON|REG AIR|ts-- final packages sleep furiousl|
+101|164|9|2|36|38309.76|0.00|0.01|N|O|1996-05-19|1996-05-01|1996-06-04|DELIVER IN PERSON|AIR|tes. blithely pending dolphins x-ray f|
+101|139|5|3|12|12469.56|0.06|0.02|N|O|1996-03-29|1996-04-20|1996-04-12|COLLECT COD|MAIL|. quickly regular|
+102|89|10|1|37|36595.96|0.06|0.00|N|O|1997-07-24|1997-08-02|1997-08-07|TAKE BACK RETURN|SHIP|ully across the ideas. final deposit|
+102|170|5|2|34|36385.78|0.03|0.08|N|O|1997-08-09|1997-07-28|1997-08-26|TAKE BACK RETURN|SHIP|eposits cajole across|
+102|183|4|3|25|27079.50|0.01|0.01|N|O|1997-07-31|1997-07-24|1997-08-17|NONE|RAIL|bits. ironic accoun|
+102|62|7|4|15|14430.90|0.07|0.07|N|O|1997-06-02|1997-07-13|1997-06-04|DELIVER IN PERSON|SHIP|final packages. carefully even excu|
+103|195|9|1|6|6571.14|0.03|0.05|N|O|1996-10-11|1996-07-25|1996-10-28|NONE|FOB|cajole. carefully ex|
+103|11|5|2|37|33707.37|0.02|0.07|N|O|1996-09-17|1996-07-27|1996-09-20|TAKE BACK RETURN|MAIL|ies. quickly ironic requests use blithely|
+103|29|10|3|23|21367.46|0.01|0.04|N|O|1996-09-11|1996-09-18|1996-09-26|NONE|FOB|ironic accou|
+103|30|9|4|32|29760.96|0.01|0.07|N|O|1996-07-30|1996-08-06|1996-08-04|NONE|RAIL|kages doze. special, regular deposit|
+128|107|10|1|38|38269.80|0.06|0.01|A|F|1992-09-01|1992-08-27|1992-10-01|TAKE BACK RETURN|FOB| cajole careful|
+129|3|6|1|46|41538.00|0.08|0.02|R|F|1993-02-15|1993-01-24|1993-03-05|COLLECT COD|TRUCK|uietly bold theodolites. fluffil|
+129|186|7|2|36|39102.48|0.01|0.02|A|F|1992-11-25|1992-12-25|1992-12-09|TAKE BACK RETURN|REG AIR|packages are care|
+129|40|6|3|33|31021.32|0.04|0.06|A|F|1993-01-08|1993-02-14|1993-01-29|COLLECT COD|SHIP|sts nag bravely. fluffily|
+129|136|7|4|34|35228.42|0.00|0.01|R|F|1993-01-29|1993-02-14|1993-02-10|COLLECT COD|MAIL|quests. express ideas|
+129|32|8|5|24|22368.72|0.06|0.00|A|F|1992-12-07|1993-01-02|1992-12-11|TAKE BACK RETURN|FOB|uests. foxes cajole slyly after the ca|
+129|78|6|6|22|21517.54|0.06|0.01|R|F|1993-02-15|1993-01-31|1993-02-24|COLLECT COD|SHIP|e. fluffily regular |
+129|169|6|7|1|1069.16|0.05|0.04|R|F|1993-01-26|1993-01-08|1993-02-24|DELIVER IN PERSON|FOB|e carefully blithely bold dolp|
+130|129|10|1|14|14407.68|0.08|0.05|A|F|1992-08-15|1992-07-25|1992-09-13|COLLECT COD|RAIL| requests. final instruction|
+130|2|5|2|48|43296.00|0.03|0.02|R|F|1992-07-01|1992-07-12|1992-07-24|NONE|AIR|lithely alongside of the regu|
+130|12|3|3|18|16416.18|0.04|0.08|A|F|1992-07-04|1992-06-14|1992-07-29|DELIVER IN PERSON|MAIL| slyly ironic decoys abou|
+130|116|6|4|13|13209.43|0.09|0.02|R|F|1992-06-26|1992-07-29|1992-07-05|NONE|FOB| pending dolphins sleep furious|
+130|70|7|5|31|30072.17|0.06|0.05|R|F|1992-09-01|1992-07-18|1992-09-02|TAKE BACK RETURN|RAIL|thily about the ruth|
+131|168|7|1|45|48067.20|0.10|0.02|R|F|1994-09-14|1994-09-02|1994-10-04|NONE|FOB|ironic, bold accounts. careful|
+131|45|8|2|50|47252.00|0.02|0.04|A|F|1994-09-17|1994-08-10|1994-09-21|NONE|SHIP|ending requests. final, ironic pearls slee|
+131|190|1|3|4|4360.76|0.04|0.03|A|F|1994-09-20|1994-08-30|1994-09-23|COLLECT COD|REG AIR| are carefully slyly i|
+132|141|8|1|18|18740.52|0.00|0.08|R|F|1993-07-10|1993-08-05|1993-07-13|NONE|TRUCK|ges. platelets wake furio|
+132|120|1|2|43|43865.16|0.01|0.08|R|F|1993-09-01|1993-08-16|1993-09-22|NONE|TRUCK|y pending theodolites|
+132|115|6|3|32|32483.52|0.04|0.04|A|F|1993-07-12|1993-08-05|1993-08-05|COLLECT COD|TRUCK|d instructions hagg|
+132|29|2|4|23|21367.46|0.10|0.00|A|F|1993-06-16|1993-08-27|1993-06-23|DELIVER IN PERSON|AIR|refully blithely bold acco|
+133|104|7|1|27|27110.70|0.00|0.02|N|O|1997-12-21|1998-02-23|1997-12-27|TAKE BACK RETURN|MAIL|yly even gifts after the sl|
+133|177|5|2|12|12926.04|0.02|0.06|N|O|1997-12-02|1998-01-15|1997-12-29|DELIVER IN PERSON|REG AIR|ts cajole fluffily quickly i|
+133|118|8|3|29|29525.19|0.09|0.08|N|O|1998-02-28|1998-01-30|1998-03-09|DELIVER IN PERSON|RAIL| the carefully regular theodoli|
+133|90|1|4|11|10890.99|0.06|0.01|N|O|1998-03-21|1998-01-15|1998-04-04|DELIVER IN PERSON|REG AIR|e quickly across the dolphins|
+134|1|2|1|21|18921.00|0.00|0.03|A|F|1992-07-17|1992-07-08|1992-07-26|COLLECT COD|SHIP|s. quickly regular|
+134|165|2|2|35|37280.60|0.06|0.07|A|F|1992-08-23|1992-06-01|1992-08-24|NONE|MAIL|ajole furiously. instructio|
+134|189|10|3|26|28318.68|0.09|0.06|A|F|1992-06-20|1992-07-12|1992-07-16|NONE|RAIL| among the pending depos|
+134|145|6|4|47|49121.58|0.05|0.00|A|F|1992-08-16|1992-07-06|1992-08-28|NONE|REG AIR|s! carefully unusual requests boost careful|
+134|36|7|5|12|11232.36|0.05|0.02|A|F|1992-07-03|1992-06-01|1992-07-11|COLLECT COD|TRUCK|nts are quic|
+134|134|10|6|12|12409.56|0.00|0.00|A|F|1992-08-08|1992-07-07|1992-08-20|TAKE BACK RETURN|FOB|lyly regular pac|
+135|109|10|1|47|47427.70|0.06|0.08|N|O|1996-02-18|1996-01-01|1996-02-25|COLLECT COD|RAIL|ctions wake slyly abo|
+135|199|3|2|21|23082.99|0.00|0.07|N|O|1996-02-11|1996-01-12|1996-02-13|DELIVER IN PERSON|SHIP| deposits believe. furiously regular p|
+135|158|10|3|33|34918.95|0.02|0.00|N|O|1996-01-03|1995-11-21|1996-02-01|TAKE BACK RETURN|MAIL|ptotes boost slowly care|
+135|68|7|4|34|32914.04|0.02|0.03|N|O|1996-01-12|1996-01-19|1996-02-05|NONE|TRUCK|counts doze against the blithely ironi|
+135|137|8|5|20|20742.60|0.01|0.04|N|O|1996-01-25|1995-11-20|1996-02-09|NONE|MAIL|theodolites. quickly p|
+135|115|5|6|13|13196.43|0.04|0.02|N|O|1995-11-12|1995-12-22|1995-11-17|NONE|FOB|nal ideas. final instr|
+160|15|2|1|36|32940.36|0.07|0.01|N|O|1997-03-11|1997-03-11|1997-03-20|COLLECT COD|MAIL|old, ironic deposits are quickly abov|
+160|87|8|2|22|21715.76|0.00|0.04|N|O|1997-02-18|1997-03-05|1997-03-05|COLLECT COD|RAIL|ncies about the request|
+160|21|10|3|34|31314.68|0.01|0.05|N|O|1997-01-31|1997-03-13|1997-02-14|NONE|FOB|st sleep even gifts. dependencies along|
+161|103|10|1|19|19058.90|0.01|0.01|A|F|1994-12-13|1994-11-19|1994-12-26|DELIVER IN PERSON|TRUCK|, regular sheaves sleep along|
+162|190|1|1|2|2180.38|0.02|0.01|N|O|1995-09-02|1995-06-17|1995-09-08|COLLECT COD|FOB|es! final somas integrate|
+163|168|3|1|43|45930.88|0.01|0.00|N|O|1997-09-19|1997-11-19|1997-10-03|COLLECT COD|REG AIR|al, bold dependencies wake. iron|
+163|121|2|2|13|13274.56|0.01|0.04|N|O|1997-11-11|1997-10-18|1997-12-07|DELIVER IN PERSON|TRUCK|inal requests. even pinto beans hag|
+163|37|3|3|27|25299.81|0.04|0.08|N|O|1997-12-26|1997-11-28|1998-01-05|COLLECT COD|REG AIR|ously express dependen|
+163|193|5|4|5|5465.95|0.02|0.00|N|O|1997-11-17|1997-10-09|1997-12-05|DELIVER IN PERSON|TRUCK| must belie|
+163|127|2|5|12|12325.44|0.10|0.00|N|O|1997-12-18|1997-10-26|1997-12-22|COLLECT COD|TRUCK|ly blithe accounts cajole |
+163|191|4|6|20|21823.80|0.00|0.07|N|O|1997-09-27|1997-11-15|1997-10-07|TAKE BACK RETURN|FOB|tructions integrate b|
+164|92|4|1|26|25794.34|0.09|0.04|A|F|1993-01-04|1992-11-21|1993-01-07|NONE|RAIL|s. blithely special courts are blithel|
+164|19|6|2|24|22056.24|0.05|0.05|R|F|1992-12-22|1992-11-27|1993-01-06|NONE|AIR|side of the slyly unusual theodolites. f|
+164|126|9|3|38|38992.56|0.03|0.06|R|F|1992-12-04|1992-11-23|1993-01-02|TAKE BACK RETURN|AIR|counts cajole fluffily regular packages. b|
+164|18|2|4|32|29376.32|0.05|0.01|R|F|1992-12-21|1992-12-23|1992-12-28|COLLECT COD|RAIL|ts wake again|
+164|148|1|5|43|45070.02|0.06|0.01|R|F|1992-11-26|1993-01-03|1992-12-08|COLLECT COD|RAIL|y carefully regular dep|
+164|109|10|6|27|27245.70|0.10|0.04|R|F|1992-12-23|1993-01-16|1993-01-10|DELIVER IN PERSON|AIR|ayers wake carefully a|
+164|4|7|7|23|20792.00|0.09|0.04|A|F|1992-11-03|1992-12-02|1992-11-12|NONE|REG AIR|ress packages haggle ideas. blithely spec|
+165|34|5|1|3|2802.09|0.01|0.08|R|F|1993-03-29|1993-03-06|1993-04-12|DELIVER IN PERSON|REG AIR|riously requests. depos|
+165|162|7|2|43|45672.88|0.08|0.05|R|F|1993-02-27|1993-04-19|1993-03-03|DELIVER IN PERSON|TRUCK|jole slyly according |
+165|59|1|3|15|14385.75|0.00|0.05|R|F|1993-04-10|1993-03-29|1993-05-01|COLLECT COD|SHIP| bold packages mainta|
+165|140|1|4|49|50966.86|0.07|0.06|A|F|1993-02-20|1993-04-02|1993-03-10|COLLECT COD|REG AIR|uses sleep slyly ruthlessly regular a|
+165|156|4|5|27|28516.05|0.01|0.04|R|F|1993-04-27|1993-03-04|1993-05-13|NONE|MAIL|around the ironic, even orb|
+166|65|2|1|37|35707.22|0.09|0.03|N|O|1995-11-16|1995-10-17|1995-12-13|NONE|MAIL|lar frays wake blithely a|
+166|167|8|2|13|13873.08|0.09|0.05|N|O|1995-11-09|1995-11-18|1995-11-14|COLLECT COD|SHIP|fully above the blithely fina|
+166|100|2|3|41|41004.10|0.07|0.03|N|O|1995-11-13|1995-11-07|1995-12-08|COLLECT COD|FOB|hily along the blithely pending fo|
+166|46|3|4|8|7568.32|0.05|0.02|N|O|1995-12-30|1995-11-29|1996-01-29|DELIVER IN PERSON|RAIL|e carefully bold |
+167|102|3|1|28|28058.80|0.06|0.01|R|F|1993-02-19|1993-02-16|1993-03-03|DELIVER IN PERSON|TRUCK|sly during the u|
+167|172|2|2|27|28948.59|0.09|0.00|R|F|1993-05-01|1993-03-31|1993-05-31|TAKE BACK RETURN|FOB|eans affix furiously-- packages|
+192|98|1|1|23|22956.07|0.00|0.00|N|O|1998-02-05|1998-02-06|1998-03-07|TAKE BACK RETURN|AIR|ly pending theodolites haggle quickly fluf|
+192|162|7|2|20|21243.20|0.07|0.01|N|O|1998-03-13|1998-02-02|1998-03-31|TAKE BACK RETURN|REG AIR|tes. carefu|
+192|111|8|3|15|15166.65|0.09|0.01|N|O|1998-01-30|1998-02-10|1998-02-23|TAKE BACK RETURN|TRUCK|he ironic requests haggle about|
+192|197|1|4|2|2194.38|0.06|0.02|N|O|1998-03-06|1998-02-03|1998-03-24|COLLECT COD|SHIP|s. dependencies nag furiously alongside|
+192|83|4|5|25|24577.00|0.02|0.03|N|O|1998-02-15|1998-01-11|1998-03-17|COLLECT COD|TRUCK|. carefully regular|
+192|142|9|6|45|46896.30|0.00|0.05|N|O|1998-03-11|1998-01-09|1998-04-03|NONE|MAIL|equests. ideas sleep idea|
+193|93|5|1|9|8937.81|0.06|0.06|A|F|1993-09-17|1993-10-08|1993-09-30|COLLECT COD|TRUCK|against the fluffily regular d|
+193|154|6|2|15|15812.25|0.02|0.07|R|F|1993-11-22|1993-10-09|1993-12-05|TAKE BACK RETURN|SHIP|ffily. regular packages d|
+193|94|6|3|23|22864.07|0.06|0.05|A|F|1993-08-21|1993-10-11|1993-09-02|DELIVER IN PERSON|TRUCK|ly even accounts wake blithely bold|
+194|3|6|1|17|15351.00|0.05|0.04|R|F|1992-05-24|1992-05-22|1992-05-30|COLLECT COD|AIR| regular deposi|
+194|184|5|2|1|1084.18|0.04|0.06|R|F|1992-04-30|1992-05-18|1992-05-23|NONE|REG AIR| regular theodolites. regular, iron|
+194|66|1|3|13|12558.78|0.08|0.08|A|F|1992-05-07|1992-06-18|1992-05-10|NONE|AIR|about the blit|
+194|146|7|4|36|37661.04|0.00|0.05|R|F|1992-05-21|1992-05-18|1992-05-27|TAKE BACK RETURN|RAIL|pecial packages wake after the slyly r|
+194|57|2|5|8|7656.40|0.04|0.00|R|F|1992-07-06|1992-06-25|1992-07-11|COLLECT COD|FOB|uriously unusual excuses|
+194|149|6|6|16|16786.24|0.06|0.03|A|F|1992-05-14|1992-06-14|1992-05-21|TAKE BACK RETURN|TRUCK|y regular requests. furious|
+194|168|7|7|21|22431.36|0.02|0.01|R|F|1992-05-06|1992-05-20|1992-05-07|COLLECT COD|REG AIR|accounts detect quickly dogged |
+195|85|6|1|6|5910.48|0.04|0.02|A|F|1994-01-09|1994-03-27|1994-01-28|COLLECT COD|REG AIR|y, even deposits haggle carefully. bli|
+195|94|8|2|41|40757.69|0.05|0.07|A|F|1994-02-24|1994-02-11|1994-03-20|NONE|TRUCK|rts detect in place of t|
+195|86|7|3|34|33526.72|0.08|0.08|R|F|1994-01-31|1994-02-11|1994-02-12|NONE|TRUCK| cajole furiously bold i|
+195|86|7|4|41|40429.28|0.06|0.04|R|F|1994-03-14|1994-03-13|1994-04-09|COLLECT COD|RAIL|ggle fluffily foxes. fluffily ironic ex|
+196|136|7|1|19|19686.47|0.03|0.02|R|F|1993-04-17|1993-05-27|1993-04-30|NONE|SHIP|sts maintain foxes. furiously regular p|
+196|10|3|2|15|13650.15|0.03|0.04|A|F|1993-07-05|1993-05-08|1993-07-06|TAKE BACK RETURN|SHIP|s accounts. furio|
+197|99|1|1|39|38964.51|0.02|0.04|N|O|1995-07-21|1995-07-01|1995-08-14|TAKE BACK RETURN|AIR|press accounts. daringly sp|
+197|178|8|2|8|8625.36|0.09|0.02|A|F|1995-04-17|1995-07-01|1995-04-27|DELIVER IN PERSON|SHIP|y blithely even deposits. blithely fina|
+197|156|4|3|17|17954.55|0.06|0.02|N|O|1995-08-02|1995-06-23|1995-08-03|COLLECT COD|REG AIR|ts. careful|
+197|18|5|4|25|22950.25|0.04|0.01|N|F|1995-06-13|1995-05-23|1995-06-24|TAKE BACK RETURN|FOB|s-- quickly final accounts|
+197|42|9|5|14|13188.56|0.09|0.01|R|F|1995-05-08|1995-05-24|1995-05-12|TAKE BACK RETURN|RAIL|use slyly slyly silent depo|
+197|106|1|6|1|1006.10|0.07|0.05|N|O|1995-07-15|1995-06-21|1995-08-11|COLLECT COD|RAIL| even, thin dependencies sno|
+198|57|8|1|33|31582.65|0.07|0.02|N|O|1998-01-05|1998-03-20|1998-01-10|TAKE BACK RETURN|TRUCK|carefully caref|
+198|16|10|2|20|18320.20|0.03|0.00|N|O|1998-01-15|1998-03-31|1998-01-25|DELIVER IN PERSON|FOB|carefully final escapades a|
+198|149|2|3|15|15737.10|0.04|0.02|N|O|1998-04-12|1998-02-26|1998-04-15|COLLECT COD|MAIL|es. quickly pending deposits s|
+198|11|5|4|35|31885.35|0.08|0.02|N|O|1998-02-27|1998-03-23|1998-03-14|TAKE BACK RETURN|RAIL|ests nod quickly furiously sly pinto be|
+198|102|3|5|33|33069.30|0.02|0.01|N|O|1998-03-22|1998-03-12|1998-04-14|DELIVER IN PERSON|SHIP|ending foxes acr|
+199|133|9|1|50|51656.50|0.02|0.00|N|O|1996-06-12|1996-06-03|1996-07-04|DELIVER IN PERSON|MAIL|essly regular ideas boost sly|
+199|134|5|2|30|31023.90|0.08|0.05|N|O|1996-03-27|1996-05-29|1996-04-14|NONE|TRUCK|ilent packages doze quickly. thinly |
+224|151|2|1|16|16818.40|0.04|0.00|A|F|1994-08-01|1994-07-30|1994-08-27|DELIVER IN PERSON|MAIL|y unusual foxes |
+224|109|2|2|34|34309.40|0.04|0.08|R|F|1994-07-13|1994-08-25|1994-07-31|COLLECT COD|TRUCK| carefully. final platelets |
+224|190|1|3|41|44697.79|0.07|0.04|A|F|1994-09-01|1994-09-15|1994-09-02|TAKE BACK RETURN|SHIP|after the furiou|
+224|167|4|4|12|12805.92|0.08|0.06|R|F|1994-10-12|1994-08-29|1994-10-20|DELIVER IN PERSON|MAIL|uriously regular packages. slyly fina|
+224|94|7|5|45|44734.05|0.07|0.07|R|F|1994-08-14|1994-09-02|1994-08-27|COLLECT COD|AIR|leep furiously regular requests. furiousl|
+224|51|3|6|4|3804.20|0.02|0.00|R|F|1994-09-08|1994-08-24|1994-10-04|DELIVER IN PERSON|FOB|tructions |
+225|172|3|1|4|4288.68|0.09|0.07|N|O|1995-08-05|1995-08-19|1995-09-03|TAKE BACK RETURN|SHIP|ng the ironic packages. asymptotes among |
+225|131|7|2|3|3093.39|0.00|0.08|N|O|1995-07-25|1995-07-08|1995-08-17|DELIVER IN PERSON|REG AIR| fluffily about the carefully bold a|
+225|199|2|3|45|49463.55|0.06|0.01|N|O|1995-08-17|1995-08-20|1995-08-30|TAKE BACK RETURN|FOB|the slyly even platelets use aro|
+225|147|4|4|24|25131.36|0.00|0.06|N|O|1995-09-23|1995-08-05|1995-10-16|COLLECT COD|MAIL|ironic accounts are final account|
+225|8|5|5|31|28148.00|0.04|0.06|N|O|1995-06-21|1995-07-24|1995-07-04|TAKE BACK RETURN|FOB|special platelets. quickly r|
+225|132|8|6|12|12385.56|0.00|0.00|A|F|1995-06-04|1995-07-15|1995-06-08|COLLECT COD|MAIL| unusual requests. bus|
+225|142|1|7|44|45854.16|0.10|0.06|N|O|1995-09-22|1995-08-16|1995-10-22|NONE|REG AIR|leep slyly |
+226|97|9|1|4|3988.36|0.00|0.00|R|F|1993-03-31|1993-04-30|1993-04-10|NONE|TRUCK|c foxes integrate carefully against th|
+226|138|4|2|46|47753.98|0.06|0.01|A|F|1993-07-06|1993-04-24|1993-07-13|COLLECT COD|FOB|s. carefully bold accounts cajol|
+226|38|4|3|35|32831.05|0.09|0.03|A|F|1993-03-31|1993-05-18|1993-04-01|NONE|RAIL|osits cajole. final, even foxes a|
+226|41|10|4|45|42346.80|0.10|0.02|R|F|1993-04-17|1993-05-27|1993-05-11|DELIVER IN PERSON|AIR| carefully pending pi|
+226|118|8|5|2|2036.22|0.07|0.02|R|F|1993-03-26|1993-04-13|1993-04-20|TAKE BACK RETURN|SHIP|al platelets. express somas |
+226|83|4|6|48|47187.84|0.02|0.00|A|F|1993-06-11|1993-05-15|1993-06-19|NONE|REG AIR|efully silent packages. final deposit|
+226|118|8|7|14|14253.54|0.09|0.00|R|F|1993-05-20|1993-06-05|1993-05-27|COLLECT COD|MAIL|ep carefully regular accounts. ironic|
+227|166|1|1|19|20257.04|0.05|0.06|N|O|1995-12-10|1996-01-30|1995-12-26|NONE|RAIL|s cajole furiously a|
+227|175|3|2|24|25804.08|0.07|0.07|N|O|1996-02-03|1995-12-24|1996-02-12|COLLECT COD|SHIP|uses across the blithe dependencies cajol|
+228|5|8|1|3|2715.00|0.10|0.08|A|F|1993-05-20|1993-04-08|1993-05-26|DELIVER IN PERSON|SHIP|ckages. sly|
+229|84|5|1|20|19681.60|0.02|0.03|R|F|1994-01-11|1994-01-31|1994-01-26|DELIVER IN PERSON|REG AIR|le. instructions use across the quickly fin|
+229|129|10|2|29|29844.48|0.07|0.00|A|F|1994-03-15|1994-03-02|1994-03-26|COLLECT COD|SHIP|s, final request|
+229|79|10|3|28|27413.96|0.02|0.02|R|F|1994-02-10|1994-02-02|1994-03-10|DELIVER IN PERSON|FOB| final, regular requests. platel|
+229|177|6|4|3|3231.51|0.02|0.08|R|F|1994-03-22|1994-03-24|1994-04-04|DELIVER IN PERSON|REG AIR|posits. furiously regular theodol|
+229|156|1|5|33|34852.95|0.03|0.06|R|F|1994-03-25|1994-02-11|1994-04-13|NONE|FOB| deposits; bold, ruthless theodolites|
+229|106|9|6|29|29176.90|0.04|0.00|R|F|1994-01-14|1994-02-16|1994-01-22|NONE|FOB|uriously pending |
+230|186|7|1|46|49964.28|0.09|0.00|R|F|1994-02-03|1994-01-15|1994-02-23|TAKE BACK RETURN|SHIP|old packages ha|
+230|195|7|2|6|6571.14|0.03|0.08|A|F|1994-01-26|1994-01-25|1994-02-13|NONE|REG AIR| sleep furiously about the p|
+230|8|5|3|1|908.00|0.07|0.06|R|F|1994-01-22|1994-01-03|1994-02-05|TAKE BACK RETURN|RAIL|blithely unusual dolphins. bold, ex|
+230|10|3|4|44|40040.44|0.08|0.06|R|F|1994-02-09|1994-01-18|1994-03-11|NONE|MAIL|deposits integrate slyly sile|
+230|19|9|5|8|7352.08|0.09|0.06|R|F|1993-11-03|1994-01-20|1993-11-11|TAKE BACK RETURN|TRUCK|g the instructions. fluffil|
+230|34|10|6|8|7472.24|0.00|0.05|R|F|1993-11-21|1994-01-05|1993-12-19|TAKE BACK RETURN|FOB|nal ideas. silent, reg|
+231|159|10|1|16|16946.40|0.04|0.08|R|F|1994-11-20|1994-10-29|1994-12-17|TAKE BACK RETURN|AIR|e furiously ironic pinto beans.|
+231|84|5|2|46|45267.68|0.04|0.05|R|F|1994-12-13|1994-12-02|1994-12-14|DELIVER IN PERSON|SHIP|affix blithely. bold requests among the f|
+231|199|1|3|50|54959.50|0.09|0.01|A|F|1994-12-11|1994-12-14|1994-12-13|NONE|RAIL|onic packages haggle fluffily a|
+231|57|8|4|31|29668.55|0.08|0.02|A|F|1994-11-05|1994-12-27|1994-11-30|TAKE BACK RETURN|SHIP|iously special decoys wake q|
+256|89|10|1|22|21759.76|0.09|0.02|R|F|1994-01-12|1993-12-28|1994-01-26|COLLECT COD|FOB|ke quickly ironic, ironic deposits. reg|
+256|119|6|2|40|40764.40|0.10|0.01|A|F|1993-11-30|1993-12-13|1993-12-02|NONE|FOB|nal theodolites. deposits cajole s|
+256|130|9|3|45|46355.85|0.02|0.08|R|F|1994-01-14|1994-01-17|1994-02-10|COLLECT COD|SHIP| grouches. ideas wake quickly ar|
+257|147|8|1|7|7329.98|0.05|0.02|N|O|1998-06-18|1998-05-15|1998-06-27|COLLECT COD|FOB|ackages sleep bold realms. f|
+258|107|4|1|8|8056.80|0.00|0.07|R|F|1994-01-20|1994-03-21|1994-02-09|NONE|REG AIR|ully about the fluffily silent dependencies|
+258|197|1|2|40|43887.60|0.10|0.01|A|F|1994-03-13|1994-02-23|1994-04-05|DELIVER IN PERSON|FOB|silent frets nod daringly busy, bold|
+258|162|3|3|45|47797.20|0.07|0.07|R|F|1994-03-04|1994-02-13|1994-03-30|DELIVER IN PERSON|TRUCK|regular excuses-- fluffily ruthl|
+258|133|9|4|31|32027.03|0.02|0.05|A|F|1994-04-20|1994-03-20|1994-04-28|COLLECT COD|REG AIR| slyly blithely special mul|
+258|36|2|5|25|23400.75|0.08|0.02|A|F|1994-04-13|1994-02-26|1994-04-29|TAKE BACK RETURN|TRUCK|leep pending packages.|
+258|147|4|6|36|37697.04|0.09|0.04|A|F|1994-01-11|1994-03-04|1994-01-18|DELIVER IN PERSON|AIR|nic asymptotes. slyly silent r|
+259|99|10|1|14|13987.26|0.00|0.08|A|F|1993-12-17|1993-12-09|1993-12-31|COLLECT COD|SHIP|ons against the express acco|
+259|162|1|2|14|14870.24|0.03|0.05|R|F|1993-11-10|1993-11-20|1993-11-17|DELIVER IN PERSON|FOB|ully even, regul|
+259|24|5|3|42|38808.84|0.09|0.00|R|F|1993-10-20|1993-11-18|1993-11-12|NONE|TRUCK|the slyly ironic pinto beans. fi|
+259|196|10|4|3|3288.57|0.08|0.06|R|F|1993-10-04|1993-11-07|1993-10-14|TAKE BACK RETURN|SHIP|ng slyly at the accounts.|
+259|193|6|5|6|6559.14|0.00|0.05|R|F|1993-12-05|1993-12-22|1993-12-21|COLLECT COD|TRUCK| requests sleep|
+260|156|7|1|50|52807.50|0.07|0.08|N|O|1997-03-24|1997-02-09|1997-04-20|TAKE BACK RETURN|REG AIR|c deposits |
+260|183|4|2|26|28162.68|0.02|0.07|N|O|1996-12-12|1997-02-06|1996-12-15|NONE|TRUCK|ld theodolites boost fl|
+260|42|1|3|27|25435.08|0.05|0.08|N|O|1997-03-23|1997-02-15|1997-04-22|TAKE BACK RETURN|RAIL|ions according to the|
+260|6|1|4|29|26274.00|0.10|0.06|N|O|1997-03-15|1997-01-14|1997-04-13|NONE|MAIL|fluffily even asymptotes. express wa|
+260|96|9|5|44|43827.96|0.01|0.05|N|O|1997-03-26|1997-02-03|1997-04-19|DELIVER IN PERSON|MAIL|above the blithely ironic instr|
+261|2|7|1|34|30668.00|0.05|0.08|R|F|1993-08-18|1993-09-24|1993-08-20|COLLECT COD|REG AIR|c packages. asymptotes da|
+261|66|7|2|20|19321.20|0.00|0.06|R|F|1993-10-21|1993-08-02|1993-11-04|DELIVER IN PERSON|RAIL|ites hinder |
+261|174|3|3|28|30076.76|0.08|0.03|R|F|1993-07-24|1993-08-20|1993-08-05|COLLECT COD|AIR|ironic packages nag slyly. carefully fin|
+261|119|3|4|49|49936.39|0.04|0.05|R|F|1993-09-12|1993-08-31|1993-10-07|COLLECT COD|SHIP|ions. bold accounts |
+261|61|6|5|49|47091.94|0.01|0.08|A|F|1993-09-29|1993-09-08|1993-10-01|COLLECT COD|SHIP| pinto beans haggle slyly furiously pending|
+261|97|9|6|20|19941.80|0.06|0.06|A|F|1993-10-15|1993-09-05|1993-11-07|NONE|AIR|ing to the special, ironic deposi|
+262|192|3|1|39|42595.41|0.01|0.05|N|O|1996-01-15|1996-02-18|1996-01-28|COLLECT COD|RAIL|usual, regular requests|
+262|61|6|2|33|31714.98|0.09|0.03|N|O|1996-03-10|1996-01-31|1996-03-27|TAKE BACK RETURN|AIR|atelets sleep furiously. requests cajole. b|
+262|59|1|3|35|33566.75|0.05|0.08|N|O|1996-03-12|1996-02-14|1996-04-11|COLLECT COD|MAIL|lites cajole along the pending packag|
+263|24|9|1|22|20328.44|0.06|0.08|R|F|1994-08-24|1994-06-20|1994-09-09|NONE|FOB|efully express fo|
+263|85|6|2|9|8865.72|0.08|0.00|A|F|1994-07-21|1994-07-16|1994-08-08|TAKE BACK RETURN|TRUCK|lms wake bl|
+263|143|2|3|50|52157.00|0.06|0.04|R|F|1994-08-18|1994-07-31|1994-08-22|NONE|TRUCK|re the packages. special|
+288|51|3|1|31|29482.55|0.00|0.03|N|O|1997-03-17|1997-04-28|1997-04-06|TAKE BACK RETURN|AIR|instructions wa|
+288|117|1|2|49|49838.39|0.08|0.05|N|O|1997-04-19|1997-05-19|1997-05-18|TAKE BACK RETURN|TRUCK|ic excuses sleep always spe|
+288|99|10|3|36|35967.24|0.02|0.02|N|O|1997-02-22|1997-05-07|1997-03-07|TAKE BACK RETURN|TRUCK|yly pending excu|
+288|79|10|4|19|18602.33|0.07|0.07|N|O|1997-03-14|1997-04-04|1997-03-26|NONE|MAIL|deposits. blithely quick courts ar|
+288|162|9|5|31|32926.96|0.10|0.04|N|O|1997-05-29|1997-04-24|1997-06-20|TAKE BACK RETURN|RAIL|ns. fluffily|
+289|174|2|1|25|26854.25|0.07|0.05|N|O|1997-03-18|1997-05-05|1997-04-15|DELIVER IN PERSON|FOB|out the quickly bold theodol|
+289|112|2|2|6|6072.66|0.06|0.05|N|O|1997-02-18|1997-05-08|1997-03-19|DELIVER IN PERSON|SHIP|d packages use fluffily furiously|
+289|17|4|3|44|40348.44|0.10|0.08|N|O|1997-06-05|1997-04-20|1997-07-02|COLLECT COD|MAIL|ly ironic foxes. asymptotes |
+289|40|6|4|48|45121.92|0.01|0.08|N|O|1997-03-14|1997-03-30|1997-03-24|DELIVER IN PERSON|RAIL|sits cajole. bold pinto beans x-ray fl|
+289|47|4|5|13|12311.52|0.10|0.03|N|O|1997-06-08|1997-04-06|1997-06-18|TAKE BACK RETURN|REG AIR|ts. quickly bold deposits alongside|
+290|6|1|1|35|31710.00|0.01|0.02|R|F|1994-04-01|1994-02-05|1994-04-27|NONE|MAIL|ove the final foxes detect slyly fluffily|
+290|129|4|2|2|2058.24|0.05|0.04|A|F|1994-01-30|1994-02-13|1994-02-21|TAKE BACK RETURN|TRUCK|. permanently furious reques|
+290|2|5|3|5|4510.00|0.03|0.05|A|F|1994-01-19|1994-02-24|1994-01-27|NONE|MAIL|ans integrate. requests sleep. fur|
+290|124|9|4|23|23554.76|0.05|0.08|R|F|1994-03-14|1994-02-21|1994-04-09|NONE|AIR|refully unusual packages. |
+291|123|6|1|21|21485.52|0.05|0.07|A|F|1994-05-26|1994-05-10|1994-06-23|COLLECT COD|TRUCK|y quickly regular theodolites. final t|
+291|138|9|2|19|19724.47|0.08|0.02|R|F|1994-06-14|1994-04-25|1994-06-19|NONE|REG AIR|e. ruthlessly final accounts after the|
+291|61|8|3|30|28831.80|0.10|0.02|R|F|1994-03-22|1994-04-30|1994-03-24|DELIVER IN PERSON|FOB| fluffily regular deposits. quickl|
+292|154|5|1|8|8433.20|0.10|0.03|R|F|1992-02-18|1992-03-30|1992-03-18|DELIVER IN PERSON|RAIL|sily bold deposits alongside of the ex|
+292|100|1|2|24|24002.40|0.08|0.04|R|F|1992-03-24|1992-03-06|1992-04-20|COLLECT COD|TRUCK| bold, pending theodolites u|
+293|9|6|1|14|12726.00|0.02|0.05|R|F|1992-10-19|1992-12-23|1992-11-10|DELIVER IN PERSON|SHIP|es. packages above the|
+293|187|8|2|11|11958.98|0.10|0.04|R|F|1992-12-24|1992-12-01|1993-01-12|COLLECT COD|MAIL| affix carefully quickly special idea|
+293|118|8|3|13|13235.43|0.04|0.02|A|F|1992-12-17|1992-12-26|1992-12-22|COLLECT COD|RAIL| wake after the quickly even deposits. bli|
+294|60|2|1|31|29761.86|0.00|0.01|R|F|1993-08-06|1993-08-19|1993-08-13|TAKE BACK RETURN|AIR|le fluffily along the quick|
+295|198|10|1|29|31847.51|0.02|0.07|A|F|1994-11-09|1994-12-08|1994-12-07|COLLECT COD|MAIL|inst the carefully ironic pinto beans. blit|
+295|92|6|2|26|25794.34|0.04|0.03|R|F|1994-12-13|1994-11-30|1995-01-06|DELIVER IN PERSON|AIR|ts above the slyly regular requests x-ray q|
+295|16|10|3|8|7328.08|0.10|0.07|R|F|1995-01-13|1994-11-17|1995-01-25|NONE|TRUCK| final instructions h|
+295|61|10|4|26|24987.56|0.10|0.04|A|F|1995-01-12|1994-11-22|1995-01-22|DELIVER IN PERSON|MAIL| carefully iron|
+320|5|2|1|30|27150.00|0.05|0.01|N|O|1997-12-04|1998-01-21|1997-12-13|NONE|RAIL| ironic, final accounts wake quick de|
+320|193|5|2|13|14211.47|0.03|0.00|N|O|1997-12-16|1997-12-26|1997-12-17|TAKE BACK RETURN|AIR|he furiously regular pinto beans. car|
+321|1|8|1|21|18921.00|0.01|0.08|A|F|1993-07-18|1993-04-24|1993-08-13|TAKE BACK RETURN|REG AIR|hockey players sleep slyly sl|
+321|141|4|2|41|42686.74|0.08|0.07|R|F|1993-06-21|1993-06-07|1993-07-09|NONE|REG AIR|special packages shall have to doze blit|
+322|153|8|1|12|12637.80|0.08|0.07|A|F|1992-06-29|1992-05-30|1992-07-11|NONE|AIR|ular theodolites promise qu|
+322|44|5|2|48|45313.92|0.02|0.07|A|F|1992-06-11|1992-06-16|1992-06-26|COLLECT COD|RAIL|dolites detect qu|
+322|13|3|3|20|18260.20|0.04|0.01|R|F|1992-04-26|1992-05-04|1992-05-22|DELIVER IN PERSON|MAIL|ckly toward |
+322|184|5|4|10|10841.80|0.06|0.03|R|F|1992-04-12|1992-05-13|1992-04-14|DELIVER IN PERSON|AIR| deposits grow slyly according to th|
+322|12|2|5|35|31920.35|0.07|0.06|A|F|1992-07-17|1992-05-03|1992-08-14|TAKE BACK RETURN|RAIL|egular accounts cajole carefully. even d|
+322|34|5|6|3|2802.09|0.08|0.05|A|F|1992-07-03|1992-05-10|1992-07-28|NONE|AIR|ending, ironic deposits along the blith|
+322|38|4|7|5|4690.15|0.01|0.02|A|F|1992-04-15|1992-05-12|1992-04-26|COLLECT COD|REG AIR| special grouches sleep quickly instructio|
+323|164|9|1|50|53208.00|0.05|0.04|A|F|1994-04-20|1994-04-25|1994-05-12|DELIVER IN PERSON|REG AIR|cial requests |
+323|96|8|2|18|17929.62|0.06|0.07|R|F|1994-04-13|1994-06-02|1994-05-10|DELIVER IN PERSON|TRUCK|posits cajole furiously pinto beans. |
+323|143|4|3|9|9388.26|0.07|0.04|A|F|1994-06-26|1994-06-10|1994-07-13|COLLECT COD|TRUCK|nic accounts. regular, regular pack|
+324|200|3|1|26|28605.20|0.07|0.01|R|F|1992-04-19|1992-05-28|1992-05-12|DELIVER IN PERSON|RAIL|ross the slyly regular s|
+325|159|1|1|34|36011.10|0.09|0.04|A|F|1993-10-28|1993-12-13|1993-11-17|TAKE BACK RETURN|MAIL|ly bold deposits. always iron|
+325|186|7|2|5|5430.90|0.07|0.08|A|F|1994-01-02|1994-01-05|1994-01-04|TAKE BACK RETURN|MAIL| theodolites. |
+325|19|3|3|35|32165.35|0.07|0.07|A|F|1993-12-06|1994-01-03|1993-12-26|DELIVER IN PERSON|REG AIR|packages wa|
+326|180|9|1|41|44287.38|0.06|0.03|N|O|1995-08-30|1995-07-09|1995-09-12|DELIVER IN PERSON|TRUCK|ily quickly bold ideas.|
+326|20|4|2|38|34960.76|0.02|0.08|N|O|1995-09-12|1995-08-23|1995-09-14|COLLECT COD|RAIL|es sleep slyly. carefully regular inst|
+326|184|5|3|25|27104.50|0.03|0.04|N|O|1995-08-03|1995-07-27|1995-08-16|NONE|AIR|ily furiously unusual accounts. |
+326|85|6|4|5|4925.40|0.03|0.08|N|O|1995-07-29|1995-07-13|1995-08-12|NONE|REG AIR|deas sleep according to the sometimes spe|
+326|35|6|5|31|28985.93|0.04|0.08|N|O|1995-09-27|1995-07-06|1995-10-22|NONE|TRUCK|cies sleep quick|
+326|157|9|6|41|43343.15|0.02|0.00|N|O|1995-07-05|1995-07-23|1995-07-20|TAKE BACK RETURN|REG AIR|to beans wake before the furiously re|
+326|43|10|7|47|44322.88|0.04|0.04|N|O|1995-09-16|1995-07-04|1995-10-04|NONE|REG AIR| special accounts sleep |
+327|144|3|1|16|16706.24|0.03|0.01|N|O|1995-07-05|1995-06-07|1995-07-09|TAKE BACK RETURN|TRUCK|cial ideas sleep af|
+327|42|9|2|9|8478.36|0.09|0.05|A|F|1995-05-24|1995-07-11|1995-06-05|NONE|AIR| asymptotes are fu|
+352|64|5|1|17|16389.02|0.07|0.05|R|F|1994-06-02|1994-05-31|1994-06-29|NONE|FOB|pending deposits sleep furiously |
+353|120|7|1|41|41824.92|0.00|0.06|A|F|1994-03-25|1994-03-31|1994-03-30|DELIVER IN PERSON|AIR|refully final theodoli|
+353|148|9|2|29|30396.06|0.09|0.00|A|F|1994-01-11|1994-03-19|1994-02-09|COLLECT COD|FOB|ctions impr|
+353|135|1|3|12|12421.56|0.06|0.01|R|F|1994-01-02|1994-03-26|1994-01-19|DELIVER IN PERSON|RAIL|g deposits cajole |
+353|78|7|4|46|44991.22|0.00|0.04|A|F|1994-04-14|1994-01-31|1994-05-05|DELIVER IN PERSON|FOB| ironic dolphins |
+353|117|4|5|9|9153.99|0.02|0.02|A|F|1994-03-15|1994-03-20|1994-03-18|TAKE BACK RETURN|RAIL|ual accounts! carefu|
+353|103|4|6|39|39120.90|0.02|0.05|A|F|1994-01-15|1994-03-30|1994-02-01|NONE|MAIL|losely quickly even accounts. c|
+354|50|7|1|14|13300.70|0.08|0.04|N|O|1996-04-12|1996-06-03|1996-05-08|NONE|SHIP|quickly regular grouches will eat. careful|
+354|194|8|2|24|26260.56|0.01|0.01|N|O|1996-05-08|1996-05-17|1996-06-07|DELIVER IN PERSON|AIR|y silent requests. regular, even accounts|
+354|59|10|3|50|47952.50|0.08|0.05|N|O|1996-03-21|1996-05-20|1996-04-04|COLLECT COD|TRUCK|to beans s|
+354|107|4|4|7|7049.70|0.06|0.01|N|O|1996-05-07|1996-04-18|1996-05-24|NONE|MAIL|ously idly ironic accounts-- quickl|
+354|31|2|5|18|16758.54|0.04|0.08|N|O|1996-03-31|1996-05-13|1996-04-27|DELIVER IN PERSON|RAIL| about the carefully unusual |
+354|62|1|6|36|34634.16|0.03|0.02|N|O|1996-03-19|1996-05-29|1996-03-30|NONE|AIR|onic requests thrash bold g|
+354|5|10|7|14|12670.00|0.01|0.07|N|O|1996-07-06|1996-06-08|1996-07-10|TAKE BACK RETURN|MAIL|t thinly above the ironic, |
+355|114|1|1|31|31437.41|0.09|0.07|A|F|1994-07-13|1994-08-18|1994-07-18|DELIVER IN PERSON|FOB|y unusual, ironic|
+355|97|1|2|41|40880.69|0.05|0.00|A|F|1994-08-15|1994-07-19|1994-09-06|DELIVER IN PERSON|TRUCK| deposits. carefully r|
+356|46|7|1|4|3784.16|0.10|0.01|A|F|1994-07-28|1994-08-01|1994-08-04|DELIVER IN PERSON|REG AIR| the dependencies nod unusual, final ac|
+356|108|3|2|48|48388.80|0.02|0.03|R|F|1994-08-12|1994-07-31|1994-08-26|NONE|FOB|unusual packages. furiously |
+356|119|3|3|35|35668.85|0.08|0.07|R|F|1994-10-14|1994-07-31|1994-10-23|COLLECT COD|TRUCK|s. unusual, final|
+356|56|1|4|41|39198.05|0.07|0.05|A|F|1994-09-28|1994-09-20|1994-10-07|COLLECT COD|SHIP| according to the express foxes will|
+356|125|8|5|37|37929.44|0.05|0.03|A|F|1994-07-15|1994-08-24|1994-08-09|DELIVER IN PERSON|FOB|ndencies are since the packag|
+357|114|5|1|26|26366.86|0.06|0.03|N|O|1996-12-28|1996-11-26|1997-01-13|NONE|FOB| carefully pending accounts use a|
+357|186|7|2|36|39102.48|0.07|0.06|N|O|1996-12-28|1996-11-13|1997-01-24|DELIVER IN PERSON|AIR|d the carefully even requests. |
+357|165|2|3|32|34085.12|0.05|0.07|N|O|1997-01-28|1996-12-29|1997-02-14|NONE|MAIL|y above the carefully final accounts|
+358|191|3|1|41|44738.79|0.06|0.01|A|F|1993-11-18|1993-11-14|1993-11-28|NONE|TRUCK|ely frets. furious deposits sleep |
+358|190|1|2|32|34886.08|0.05|0.08|A|F|1993-10-18|1993-12-12|1993-10-31|NONE|TRUCK|y final foxes sleep blithely sl|
+358|169|6|3|40|42766.40|0.09|0.01|A|F|1993-12-05|1993-11-04|1994-01-01|COLLECT COD|MAIL|ng the ironic theo|
+358|97|10|4|15|14956.35|0.08|0.08|A|F|1993-10-04|1993-12-17|1993-10-23|TAKE BACK RETURN|MAIL|out the blithely ironic deposits slee|
+358|29|2|5|18|16722.36|0.01|0.02|R|F|1993-10-07|1993-11-01|1993-10-26|COLLECT COD|SHIP|olphins haggle ironic accounts. f|
+358|162|3|6|32|33989.12|0.03|0.05|R|F|1993-12-21|1993-11-06|1994-01-17|DELIVER IN PERSON|RAIL|lyly express deposits |
+358|83|4|7|45|44238.60|0.05|0.02|A|F|1993-12-08|1993-10-29|1993-12-30|NONE|REG AIR|to beans. regular, unusual deposits sl|
+359|166|7|1|30|31984.80|0.00|0.08|A|F|1995-01-06|1995-02-20|1995-01-20|TAKE BACK RETURN|AIR|uses detect spec|
+359|12|9|2|18|16416.18|0.00|0.03|A|F|1995-01-27|1995-03-18|1995-01-31|DELIVER IN PERSON|RAIL|unusual warthogs. ironically sp|
+359|132|8|3|17|17546.21|0.07|0.06|A|F|1995-01-31|1995-03-18|1995-02-10|COLLECT COD|SHIP|sts according to the blithely|
+359|90|1|4|38|37623.42|0.10|0.08|R|F|1995-03-30|1995-01-20|1995-04-25|DELIVER IN PERSON|RAIL|g furiously. regular, sile|
+359|168|5|5|11|11749.76|0.01|0.03|A|F|1995-02-15|1995-01-27|1995-02-18|NONE|FOB|rets wake blithely. slyly final dep|
+359|183|4|6|23|24913.14|0.04|0.07|R|F|1995-01-31|1995-03-11|1995-02-16|DELIVER IN PERSON|REG AIR|ic courts snooze quickly furiously final fo|
+384|179|8|1|38|41008.46|0.07|0.01|R|F|1992-06-02|1992-04-18|1992-06-10|DELIVER IN PERSON|TRUCK|totes cajole blithely against the even|
+384|64|3|2|49|47238.94|0.09|0.07|A|F|1992-04-01|1992-04-25|1992-04-18|COLLECT COD|AIR|refully carefully ironic instructions. bl|
+384|182|3|3|11|11903.98|0.02|0.08|A|F|1992-04-02|1992-04-21|1992-04-15|COLLECT COD|MAIL|ash carefully|
+384|93|6|4|11|10923.99|0.00|0.06|R|F|1992-06-24|1992-05-29|1992-07-22|COLLECT COD|TRUCK|nic excuses are furiously above the blith|
+384|132|8|5|14|14449.82|0.08|0.06|R|F|1992-06-14|1992-05-29|1992-07-05|DELIVER IN PERSON|TRUCK|ckages are slyly after the slyly specia|
+385|167|6|1|7|7470.12|0.05|0.06|N|O|1996-05-23|1996-05-09|1996-06-06|DELIVER IN PERSON|REG AIR| special asymptote|
+385|54|9|2|46|43886.30|0.08|0.07|N|O|1996-03-29|1996-05-17|1996-04-18|NONE|REG AIR|lthily ironic f|
+386|153|5|1|39|41072.85|0.10|0.07|A|F|1995-05-10|1995-02-28|1995-05-25|NONE|SHIP|hely. carefully regular accounts hag|
+386|69|4|2|16|15504.96|0.06|0.01|A|F|1995-04-12|1995-04-18|1995-05-11|DELIVER IN PERSON|MAIL|lithely fluffi|
+386|131|2|3|37|38151.81|0.09|0.04|A|F|1995-05-23|1995-03-01|1995-05-25|TAKE BACK RETURN|MAIL|ending pearls breach fluffily. slyly pen|
+387|137|8|1|1|1037.13|0.08|0.03|N|O|1997-05-06|1997-04-23|1997-05-10|NONE|SHIP| pinto beans wake furiously carefu|
+387|153|4|2|42|44232.30|0.07|0.05|N|O|1997-05-25|1997-02-25|1997-05-29|DELIVER IN PERSON|RAIL|lithely final theodolites.|
+387|97|10|3|40|39883.60|0.09|0.02|N|O|1997-03-08|1997-04-18|1997-03-31|COLLECT COD|TRUCK| quickly ironic platelets are slyly. fluff|
+387|56|7|4|19|18164.95|0.08|0.00|N|O|1997-03-14|1997-04-21|1997-04-04|NONE|REG AIR|gular dependencies|
+387|149|6|5|32|33572.48|0.08|0.06|N|O|1997-05-02|1997-04-11|1997-05-11|DELIVER IN PERSON|TRUCK|gle. silent, fur|
+388|33|9|1|42|39187.26|0.05|0.06|R|F|1993-02-21|1993-02-26|1993-03-15|COLLECT COD|FOB|accounts sleep furiously|
+388|128|9|2|46|47293.52|0.07|0.01|A|F|1993-03-22|1993-01-26|1993-03-24|COLLECT COD|FOB|to beans nag about the careful reque|
+388|65|2|3|40|38602.40|0.06|0.01|A|F|1992-12-24|1993-01-28|1993-01-19|TAKE BACK RETURN|REG AIR|quests against the carefully unusual epi|
+389|190|1|1|2|2180.38|0.09|0.00|R|F|1994-04-13|1994-04-10|1994-04-25|TAKE BACK RETURN|RAIL|fts. courts eat blithely even dependenc|
+390|107|10|1|10|10071.00|0.02|0.05|N|O|1998-05-26|1998-07-06|1998-06-23|TAKE BACK RETURN|SHIP| requests. final accounts x-ray beside the|
+390|124|7|2|17|17410.04|0.09|0.06|N|O|1998-06-07|1998-06-14|1998-07-07|COLLECT COD|SHIP|ending, pending pinto beans wake slyl|
+390|184|5|3|46|49872.28|0.07|0.04|N|O|1998-06-06|1998-05-20|1998-06-14|DELIVER IN PERSON|SHIP|cial excuses. bold, pending packages|
+390|142|3|4|42|43769.88|0.01|0.05|N|O|1998-06-06|1998-06-22|1998-07-05|COLLECT COD|SHIP|counts nag across the sly, sil|
+390|128|3|5|13|13365.56|0.02|0.06|N|O|1998-07-08|1998-05-10|1998-07-18|DELIVER IN PERSON|SHIP|sleep carefully idle packages. blithely |
+390|125|4|6|11|11276.32|0.09|0.06|N|O|1998-05-05|1998-05-15|1998-06-01|DELIVER IN PERSON|SHIP|according to the foxes are furiously |
+390|85|6|7|24|23641.92|0.05|0.02|N|O|1998-04-18|1998-05-19|1998-04-28|TAKE BACK RETURN|AIR|y. enticingly final depos|
+391|122|1|1|14|14309.68|0.09|0.02|R|F|1995-02-11|1995-02-03|1995-02-13|TAKE BACK RETURN|TRUCK| escapades sleep furiously about |
+416|94|6|1|25|24852.25|0.00|0.05|A|F|1993-10-11|1993-11-26|1993-10-21|DELIVER IN PERSON|TRUCK|y final theodolites about|
+416|111|1|2|22|22244.42|0.10|0.00|R|F|1993-12-27|1993-12-17|1994-01-09|COLLECT COD|RAIL|rint blithely above the pending sentim|
+416|175|5|3|25|26879.25|0.07|0.01|R|F|1993-10-16|1993-12-03|1993-10-29|NONE|AIR|ses boost after the bold requests.|
+417|40|1|1|39|36661.56|0.01|0.02|A|F|1994-05-31|1994-05-02|1994-06-06|NONE|SHIP|y regular requests wake along |
+417|70|7|2|18|17461.26|0.00|0.01|R|F|1994-03-29|1994-04-10|1994-04-26|TAKE BACK RETURN|FOB|- final requests sle|
+417|45|2|3|41|38746.64|0.10|0.01|R|F|1994-04-11|1994-03-08|1994-05-06|COLLECT COD|RAIL|tes. regular requests across the |
+417|132|3|4|2|2064.26|0.01|0.03|R|F|1994-02-13|1994-04-19|1994-03-15|DELIVER IN PERSON|SHIP|uriously bol|
+418|19|3|1|31|28489.31|0.00|0.03|N|F|1995-06-05|1995-06-18|1995-06-26|COLLECT COD|FOB|final theodolites. fluffil|
+418|2|5|2|1|902.00|0.04|0.07|N|O|1995-06-23|1995-06-16|1995-07-23|DELIVER IN PERSON|AIR|regular, silent pinto|
+418|35|1|3|3|2805.09|0.04|0.06|N|O|1995-06-29|1995-07-12|1995-07-01|COLLECT COD|AIR|ly furiously regular w|
+419|153|8|1|33|34753.95|0.05|0.02|N|O|1996-11-06|1996-12-25|1996-11-20|TAKE BACK RETURN|TRUCK|y above the bli|
+419|65|2|2|32|30881.92|0.01|0.06|N|O|1996-12-04|1996-12-04|1996-12-24|COLLECT COD|SHIP|blithely regular requests. special pinto|
+419|71|1|3|15|14566.05|0.07|0.04|N|O|1996-12-17|1996-11-28|1996-12-19|TAKE BACK RETURN|REG AIR| sleep final, regular theodolites. fluffi|
+419|9|6|4|15|13635.00|0.01|0.02|N|O|1997-01-09|1996-12-22|1997-01-25|COLLECT COD|FOB|of the careful, thin theodolites. quickly s|
+419|149|2|5|17|17835.38|0.01|0.00|N|O|1997-01-13|1996-12-20|1997-02-01|COLLECT COD|REG AIR|lar dependencies: carefully regu|
+420|101|6|1|5|5005.50|0.04|0.03|N|O|1995-11-04|1996-01-02|1995-11-30|NONE|REG AIR|cajole blit|
+420|162|7|2|22|23367.52|0.05|0.04|N|O|1996-01-25|1995-12-16|1996-02-03|TAKE BACK RETURN|AIR|ly against the blithely re|
+420|48|1|3|45|42661.80|0.09|0.08|N|O|1996-01-14|1996-01-01|1996-01-26|COLLECT COD|FOB| final accounts. furiously express forges|
+420|75|6|4|12|11700.84|0.08|0.08|N|O|1996-02-05|1996-01-03|1996-02-12|TAKE BACK RETURN|REG AIR|c instructions are |
+420|73|2|5|37|36003.59|0.02|0.00|N|O|1995-11-16|1995-12-13|1995-11-19|DELIVER IN PERSON|SHIP|rbits. bold requests along the quickl|
+420|124|7|6|40|40964.80|0.01|0.05|N|O|1995-11-26|1995-12-26|1995-12-20|TAKE BACK RETURN|FOB| after the special|
+420|16|7|7|39|35724.39|0.00|0.08|N|O|1995-12-09|1995-12-16|1995-12-31|DELIVER IN PERSON|REG AIR|s. ironic waters about the car|
+421|134|5|1|1|1034.13|0.02|0.07|R|F|1992-05-29|1992-04-27|1992-06-09|NONE|TRUCK|oldly busy deposit|
+422|152|10|1|25|26303.75|0.10|0.07|N|O|1997-07-01|1997-08-17|1997-07-09|DELIVER IN PERSON|SHIP|carefully bold theodolit|
+422|171|1|2|10|10711.70|0.02|0.03|N|O|1997-06-15|1997-08-04|1997-07-08|TAKE BACK RETURN|AIR|he furiously ironic theodolite|
+422|176|4|3|46|49503.82|0.09|0.00|N|O|1997-06-21|1997-07-14|1997-06-27|DELIVER IN PERSON|RAIL| ideas. qu|
+422|162|7|4|25|26554.00|0.10|0.04|N|O|1997-08-24|1997-07-09|1997-09-22|NONE|FOB|ep along the furiousl|
+423|132|3|1|27|27867.51|0.06|0.03|N|O|1996-08-20|1996-08-01|1996-08-23|TAKE BACK RETURN|SHIP|ccounts. blithely regular pack|
+448|126|7|1|4|4104.48|0.00|0.04|N|O|1995-11-25|1995-10-20|1995-11-26|TAKE BACK RETURN|MAIL|nts thrash quickly among the b|
+448|173|1|2|46|49365.82|0.05|0.00|N|O|1995-08-31|1995-09-30|1995-09-09|COLLECT COD|SHIP| to the fluffily ironic packages.|
+448|27|6|3|35|32445.70|0.10|0.08|N|O|1995-09-27|1995-11-19|1995-10-20|COLLECT COD|REG AIR|ses nag quickly quickly ir|
+448|170|1|4|8|8561.36|0.10|0.00|N|O|1995-11-02|1995-10-16|1995-11-15|COLLECT COD|TRUCK|ounts wake blithely. furiously pending|
+448|138|9|5|23|23876.99|0.02|0.05|N|O|1995-09-26|1995-11-02|1995-10-17|NONE|SHIP|ious, final gifts|
+449|152|7|1|12|12625.80|0.02|0.08|N|O|1995-11-06|1995-08-25|1995-11-18|TAKE BACK RETURN|SHIP|ly. blithely ironic |
+449|109|6|2|4|4036.40|0.10|0.06|N|O|1995-10-27|1995-09-14|1995-11-21|DELIVER IN PERSON|FOB|are fluffily. requests are furiously|
+449|10|1|3|3|2730.03|0.07|0.08|N|O|1995-07-28|1995-09-11|1995-08-01|NONE|RAIL| bold deposits. express theodolites haggle|
+449|158|3|4|22|23279.30|0.07|0.00|N|O|1995-08-17|1995-09-04|1995-09-10|COLLECT COD|FOB|furiously final theodolites eat careful|
+450|162|7|1|42|44610.72|0.03|0.00|N|F|1995-06-07|1995-05-29|1995-06-23|TAKE BACK RETURN|SHIP|y asymptotes. regular depen|
+450|107|8|2|5|5035.50|0.03|0.02|A|F|1995-04-02|1995-05-06|1995-04-13|TAKE BACK RETURN|TRUCK|the pinto bea|
+450|143|6|3|32|33380.48|0.06|0.03|N|O|1995-07-02|1995-04-25|1995-07-30|TAKE BACK RETURN|SHIP| accounts nod fluffily even, pending|
+450|57|9|4|40|38282.00|0.05|0.03|R|F|1995-03-20|1995-05-25|1995-04-14|NONE|RAIL|ve. asymptote|
+450|79|10|5|2|1958.14|0.09|0.00|A|F|1995-03-11|1995-05-21|1995-03-16|COLLECT COD|AIR|y even pinto beans; qui|
+450|153|1|6|33|34753.95|0.08|0.05|R|F|1995-05-18|1995-05-22|1995-05-23|TAKE BACK RETURN|REG AIR|ily carefully final depo|
+451|130|9|1|36|37084.68|0.02|0.06|N|O|1998-06-18|1998-08-14|1998-06-20|TAKE BACK RETURN|AIR|rges can haggle carefully ironic, dogged |
+451|33|4|2|42|39187.26|0.05|0.01|N|O|1998-08-01|1998-08-05|1998-08-30|DELIVER IN PERSON|TRUCK|express excuses. blithely ironic pin|
+451|87|8|3|1|987.08|0.07|0.05|N|O|1998-07-13|1998-07-03|1998-08-04|DELIVER IN PERSON|AIR| carefully ironic packages solve furiously |
+451|77|5|4|28|27357.96|0.04|0.05|N|O|1998-06-16|1998-07-09|1998-06-17|DELIVER IN PERSON|SHIP| theodolites. even cou|
+452|115|6|1|2|2030.22|0.04|0.03|N|O|1997-12-26|1998-01-03|1998-01-12|COLLECT COD|FOB|y express instru|
+453|198|1|1|45|49418.55|0.01|0.00|N|O|1997-06-30|1997-08-20|1997-07-19|COLLECT COD|REG AIR|ifts wake carefully.|
+453|176|4|2|38|40894.46|0.08|0.04|N|O|1997-06-30|1997-07-08|1997-07-16|DELIVER IN PERSON|REG AIR| furiously f|
+453|14|1|3|38|34732.38|0.10|0.01|N|O|1997-08-10|1997-07-24|1997-09-07|NONE|SHIP|sts cajole. furiously un|
+453|96|7|4|45|44824.05|0.10|0.01|N|O|1997-09-18|1997-06-29|1997-10-14|TAKE BACK RETURN|AIR|ironic foxes. slyly pending depos|
+453|26|1|5|32|29632.64|0.04|0.01|N|O|1997-07-15|1997-06-27|1997-07-18|NONE|REG AIR|s. fluffily bold packages cajole. unu|
+453|95|7|6|28|27862.52|0.07|0.07|N|O|1997-08-16|1997-08-12|1997-08-27|NONE|MAIL|final dependencies. slyly special pl|
+454|118|8|1|24|24434.64|0.06|0.01|N|O|1996-04-26|1996-03-23|1996-05-20|NONE|TRUCK|le. deposits after the ideas nag unusual pa|
+455|157|9|1|42|44400.30|0.10|0.02|N|O|1997-01-26|1997-01-10|1997-02-22|DELIVER IN PERSON|REG AIR|around the quickly blit|
+455|28|9|2|44|40832.88|0.05|0.08|N|O|1997-01-17|1997-02-22|1997-02-12|TAKE BACK RETURN|TRUCK| accounts sleep slyly ironic asymptote|
+455|49|2|3|45|42706.80|0.04|0.06|N|O|1996-12-20|1997-01-31|1997-01-07|TAKE BACK RETURN|SHIP|thrash ironically regular packages. qui|
+455|171|9|4|11|11782.87|0.01|0.02|N|O|1997-03-15|1997-02-14|1997-03-26|DELIVER IN PERSON|MAIL|g deposits against the slyly idle foxes u|
+480|53|4|1|22|20967.10|0.04|0.02|A|F|1993-06-16|1993-07-28|1993-07-09|NONE|MAIL|into beans cajole furiously. accounts s|
+481|19|9|1|17|15623.17|0.07|0.05|A|F|1992-10-21|1992-12-09|1992-11-19|DELIVER IN PERSON|MAIL|. quickly final accounts among the |
+481|21|2|2|19|17499.38|0.08|0.01|R|F|1993-01-09|1992-11-27|1993-01-14|TAKE BACK RETURN|AIR|p blithely after t|
+481|186|7|3|42|45619.56|0.08|0.08|A|F|1992-11-27|1992-11-11|1992-12-08|COLLECT COD|RAIL|mptotes are furiously among the iron|
+481|82|3|4|11|10802.88|0.05|0.06|A|F|1993-01-12|1992-11-17|1993-02-05|NONE|FOB|eful attai|
+481|112|9|5|31|31375.41|0.05|0.01|A|F|1993-01-15|1992-12-31|1993-01-21|DELIVER IN PERSON|AIR|usly final packages believe. quick|
+482|138|9|1|32|33220.16|0.00|0.02|N|O|1996-05-22|1996-05-14|1996-05-29|NONE|SHIP|usual deposits affix against |
+482|122|5|2|1|1022.12|0.05|0.08|N|O|1996-05-29|1996-05-20|1996-05-31|COLLECT COD|AIR|es. quickly ironic escapades sleep furious|
+482|62|9|3|31|29823.86|0.04|0.03|N|O|1996-06-01|1996-05-06|1996-06-17|NONE|MAIL| blithe pin|
+482|196|7|4|8|8769.52|0.02|0.05|N|O|1996-04-19|1996-05-05|1996-04-21|NONE|TRUCK|tructions near the final, regular ideas de|
+482|39|10|5|46|43195.38|0.01|0.06|N|O|1996-07-19|1996-06-05|1996-08-10|NONE|MAIL|furiously thin realms. final, fina|
+482|79|10|6|19|18602.33|0.04|0.00|N|O|1996-03-27|1996-04-25|1996-04-15|NONE|FOB|ts hinder carefully silent requests|
+483|33|9|1|8|7464.24|0.00|0.08|N|O|1995-08-22|1995-08-23|1995-09-18|COLLECT COD|RAIL|osits. carefully fin|
+483|80|1|2|23|22541.84|0.04|0.06|N|O|1995-07-20|1995-08-11|1995-08-04|DELIVER IN PERSON|MAIL|requests was quickly against th|
+483|88|9|3|9|8892.72|0.04|0.03|N|O|1995-09-10|1995-09-02|1995-09-13|NONE|AIR| carefully express ins|
+484|31|2|1|49|45620.47|0.10|0.02|N|O|1997-03-06|1997-02-28|1997-03-23|COLLECT COD|TRUCK|ven accounts|
+484|32|8|2|45|41941.35|0.06|0.07|N|O|1997-04-09|1997-03-20|1997-04-19|DELIVER IN PERSON|TRUCK|usly final excuses boost slyly blithe|
+484|184|5|3|50|54209.00|0.06|0.05|N|O|1997-01-24|1997-03-27|1997-02-22|DELIVER IN PERSON|MAIL|uctions wake. final, silent requests haggle|
+484|165|6|4|22|23433.52|0.07|0.03|N|O|1997-04-29|1997-03-26|1997-05-17|TAKE BACK RETURN|SHIP|es are pending instructions. furiously unu|
+484|77|6|5|48|46899.36|0.00|0.05|N|O|1997-03-05|1997-02-08|1997-03-22|TAKE BACK RETURN|MAIL|l, bold packages? even mult|
+484|97|9|6|10|9970.90|0.01|0.08|N|O|1997-04-06|1997-02-14|1997-04-16|COLLECT COD|FOB|x fluffily carefully regular|
+485|150|1|1|50|52507.50|0.01|0.00|N|O|1997-03-28|1997-05-26|1997-04-18|TAKE BACK RETURN|MAIL|iously quick excuses. carefully final f|
+485|28|7|2|40|37120.80|0.08|0.01|N|O|1997-04-29|1997-05-08|1997-04-30|TAKE BACK RETURN|TRUCK|al escapades|
+485|137|3|3|22|22816.86|0.00|0.05|N|O|1997-04-06|1997-04-27|1997-05-01|DELIVER IN PERSON|TRUCK|refully final notornis haggle according |
+486|76|7|1|36|35138.52|0.00|0.01|N|O|1996-06-25|1996-05-06|1996-07-07|COLLECT COD|AIR|deposits around the quickly regular packa|
+486|68|9|2|40|38722.40|0.03|0.08|N|O|1996-05-21|1996-06-06|1996-06-07|COLLECT COD|SHIP|ts nag quickly among the slyl|
+486|136|2|3|26|26939.38|0.04|0.03|N|O|1996-03-16|1996-05-25|1996-03-31|NONE|RAIL|forges along the |
+486|72|1|4|38|36938.66|0.08|0.05|N|O|1996-05-07|1996-04-26|1996-05-26|TAKE BACK RETURN|TRUCK| blithely final pinto |
+486|29|2|5|3|2787.06|0.07|0.05|N|O|1996-07-07|1996-04-20|1996-07-23|DELIVER IN PERSON|RAIL|ccounts ha|
+486|47|4|6|46|43563.84|0.00|0.03|N|O|1996-04-18|1996-05-02|1996-04-20|COLLECT COD|AIR|theodolites eat carefully furious|
+487|92|3|1|47|46628.23|0.06|0.06|R|F|1992-09-30|1992-10-08|1992-10-24|NONE|TRUCK|tions. blithely reg|
+487|83|4|2|2|1966.16|0.02|0.06|R|F|1992-10-19|1992-11-04|1992-11-11|COLLECT COD|TRUCK|oss the unusual pinto beans. reg|
+512|189|10|1|19|20694.42|0.08|0.05|N|O|1995-07-12|1995-07-11|1995-08-04|COLLECT COD|MAIL| sleep. requests alongside of the fluff|
+512|23|2|2|37|34151.74|0.01|0.04|N|O|1995-06-20|1995-07-05|1995-07-16|NONE|RAIL|nic depths cajole? blithely b|
+512|180|1|3|40|43207.20|0.05|0.02|N|O|1995-07-06|1995-07-08|1995-07-08|COLLECT COD|TRUCK|quests are da|
+512|83|4|4|10|9830.80|0.09|0.02|N|O|1995-09-16|1995-07-29|1995-10-07|NONE|AIR|xes. pinto beans cajole carefully; |
+512|65|6|5|6|5790.36|0.03|0.05|R|F|1995-06-10|1995-06-21|1995-06-16|DELIVER IN PERSON|FOB|en ideas haggle |
+512|33|9|6|12|11196.36|0.04|0.00|R|F|1995-05-21|1995-08-03|1995-06-09|COLLECT COD|FOB|old furiously express deposits. specia|
+512|51|9|7|2|1902.10|0.09|0.08|N|O|1995-06-19|1995-08-13|1995-06-24|NONE|TRUCK|e slyly silent accounts serve with|
+513|62|7|1|20|19241.20|0.09|0.07|N|O|1995-07-12|1995-05-31|1995-07-31|NONE|AIR|efully ironic ideas doze slyl|
+513|122|5|2|44|44973.28|0.01|0.01|N|O|1995-07-14|1995-07-14|1995-08-12|NONE|MAIL|kages sleep boldly ironic theodolites. acco|
+514|79|9|1|21|20560.47|0.06|0.02|N|O|1996-06-09|1996-05-15|1996-07-07|DELIVER IN PERSON|RAIL|s sleep quickly blithely|
+514|118|2|2|34|34615.74|0.08|0.02|N|O|1996-04-14|1996-06-03|1996-04-23|COLLECT COD|REG AIR|ily even patterns. bold, silent instruc|
+514|13|7|3|6|5478.06|0.06|0.01|N|O|1996-05-30|1996-06-04|1996-06-28|COLLECT COD|SHIP|as haggle blithely; quickly s|
+514|116|7|4|43|43692.73|0.00|0.08|N|O|1996-06-07|1996-05-14|1996-07-01|TAKE BACK RETURN|FOB|thely regular |
+515|105|8|1|10|10051.00|0.03|0.02|A|F|1993-10-04|1993-11-03|1993-10-08|NONE|FOB|ar deposits th|
+515|148|1|2|38|39829.32|0.10|0.07|A|F|1993-09-19|1993-11-12|1993-10-03|DELIVER IN PERSON|SHIP|ays. furiously express requests haggle furi|
+515|183|4|3|11|11914.98|0.00|0.02|R|F|1993-09-04|1993-10-02|1993-09-05|DELIVER IN PERSON|FOB|ly pending accounts haggle blithel|
+515|109|10|4|34|34309.40|0.09|0.03|R|F|1993-10-03|1993-10-26|1993-10-15|DELIVER IN PERSON|REG AIR|ic dependencie|
+515|131|7|5|32|32996.16|0.01|0.07|R|F|1993-10-10|1993-10-08|1993-11-02|TAKE BACK RETURN|FOB|r sauternes boost. final theodolites wake a|
+515|109|4|6|25|25227.50|0.04|0.08|R|F|1993-11-14|1993-11-07|1993-12-03|DELIVER IN PERSON|MAIL|e packages engag|
+516|25|4|1|11|10175.22|0.01|0.06|N|O|1998-05-02|1998-05-23|1998-05-12|DELIVER IN PERSON|FOB|ongside of the blithely final reque|
+517|45|6|1|28|26461.12|0.03|0.02|N|O|1997-04-30|1997-05-18|1997-05-17|COLLECT COD|MAIL| requests. special, fi|
+517|156|4|2|15|15842.25|0.02|0.00|N|O|1997-04-09|1997-06-26|1997-05-01|NONE|TRUCK| slyly. express requests ar|
+517|41|8|3|9|8469.36|0.04|0.00|N|O|1997-05-03|1997-06-16|1997-05-24|COLLECT COD|SHIP| slyly stealthily express instructions. |
+517|133|4|4|11|11364.43|0.06|0.02|N|O|1997-06-20|1997-06-01|1997-06-27|NONE|REG AIR|ly throughout the fu|
+517|24|3|5|23|21252.46|0.00|0.01|N|O|1997-04-19|1997-05-07|1997-05-12|COLLECT COD|RAIL| kindle. furiously bold requests mus|
+518|165|6|1|30|31954.80|0.07|0.05|N|O|1998-02-18|1998-03-27|1998-03-16|COLLECT COD|TRUCK|slyly by the packages. carefull|
+518|84|5|2|23|22633.84|0.05|0.07|N|O|1998-02-20|1998-05-05|1998-03-11|COLLECT COD|TRUCK| special requests. fluffily ironic re|
+518|134|5|3|12|12409.56|0.01|0.06|N|O|1998-03-08|1998-03-31|1998-04-06|NONE|AIR| packages thrash slyly|
+518|122|3|4|46|47017.52|0.07|0.02|N|O|1998-04-07|1998-04-17|1998-04-29|NONE|MAIL|. blithely even ideas cajole furiously. b|
+518|71|2|5|16|15537.12|0.01|0.01|N|O|1998-03-15|1998-03-24|1998-04-08|NONE|MAIL|use quickly expre|
+518|197|10|6|39|42790.41|0.09|0.08|N|O|1998-02-26|1998-03-17|1998-03-21|DELIVER IN PERSON|FOB| the bold, special deposits are carefully |
+518|186|7|7|48|52136.64|0.03|0.07|N|O|1998-03-06|1998-04-22|1998-03-14|NONE|FOB| slyly final platelets; quickly even deposi|
+519|159|4|1|1|1059.15|0.07|0.07|N|O|1997-12-01|1998-01-26|1997-12-23|COLLECT COD|REG AIR|bold requests believe furiou|
+519|3|4|2|38|34314.00|0.05|0.08|N|O|1998-02-19|1997-12-15|1998-03-19|DELIVER IN PERSON|FOB|gular excuses detect quickly furiously |
+519|106|1|3|19|19115.90|0.00|0.02|N|O|1998-01-09|1998-01-03|1998-02-06|COLLECT COD|AIR|asymptotes. p|
+519|47|6|4|27|25570.08|0.08|0.06|N|O|1997-11-20|1997-12-06|1997-12-16|DELIVER IN PERSON|REG AIR|le. even, final dependencies|
+519|10|5|5|13|11830.13|0.06|0.08|N|O|1998-02-06|1997-12-02|1998-03-03|TAKE BACK RETURN|TRUCK|c accounts wake along the ironic so|
+519|151|6|6|3|3153.45|0.04|0.00|N|O|1998-02-01|1998-01-25|1998-02-27|TAKE BACK RETURN|FOB|erve blithely blithely ironic asymp|
+544|139|10|1|47|48839.11|0.08|0.06|R|F|1993-03-14|1993-03-27|1993-03-27|COLLECT COD|SHIP|ecial pains. deposits grow foxes. |
+545|170|1|1|4|4280.68|0.02|0.00|N|O|1996-02-23|1995-12-16|1996-03-21|DELIVER IN PERSON|FOB|, ironic grouches cajole over|
+545|171|10|2|18|19281.06|0.00|0.00|N|O|1996-02-21|1996-01-17|1996-02-26|NONE|RAIL|al, final packages affix. even a|
+546|85|6|1|16|15761.28|0.08|0.02|N|O|1997-02-04|1996-12-30|1997-02-25|DELIVER IN PERSON|TRUCK|de of the orbits. sometimes regula|
+547|71|10|1|44|42727.08|0.08|0.08|N|O|1996-10-18|1996-08-17|1996-10-27|TAKE BACK RETURN|FOB|thely express dependencies. qu|
+547|137|8|2|48|49782.24|0.01|0.04|N|O|1996-10-21|1996-08-04|1996-11-20|COLLECT COD|SHIP|thely specia|
+547|182|3|3|3|3246.54|0.05|0.02|N|O|1996-09-04|1996-08-01|1996-09-21|COLLECT COD|SHIP|pinto beans. ironi|
+548|197|8|1|2|2194.38|0.06|0.05|A|F|1994-11-26|1994-11-06|1994-12-06|COLLECT COD|MAIL|ests haggle quickly eve|
+548|5|6|2|6|5430.00|0.00|0.08|A|F|1995-01-18|1994-12-08|1995-02-10|NONE|TRUCK|sits wake furiously regular|
+548|1|8|3|21|18921.00|0.03|0.08|A|F|1995-01-13|1994-12-18|1995-01-25|NONE|AIR|ideas. special accounts above the furiou|
+548|57|9|4|21|20098.05|0.08|0.03|A|F|1994-10-27|1994-12-04|1994-11-21|DELIVER IN PERSON|AIR| engage quickly. regular theo|
+548|93|7|5|19|18868.71|0.00|0.02|A|F|1994-09-24|1994-11-24|1994-10-01|DELIVER IN PERSON|MAIL|courts boost care|
+548|153|8|6|32|33700.80|0.06|0.04|A|F|1994-12-16|1994-11-20|1994-12-29|NONE|REG AIR|c instruction|
+549|196|9|1|18|19731.42|0.07|0.04|R|F|1992-10-19|1992-08-12|1992-11-13|COLLECT COD|REG AIR|furiously according to the ironic, regular |
+549|189|10|2|38|41388.84|0.07|0.05|A|F|1992-08-17|1992-08-28|1992-09-05|COLLECT COD|RAIL|the regular, furious excuses. carefu|
+549|66|7|3|36|34778.16|0.08|0.04|R|F|1992-09-11|1992-10-11|1992-09-12|DELIVER IN PERSON|AIR|ts against the ironic, even theodolites eng|
+549|21|4|4|18|16578.36|0.09|0.01|A|F|1992-07-31|1992-09-11|1992-08-08|NONE|RAIL|ely regular accounts above the |
+549|24|7|5|38|35112.76|0.06|0.02|R|F|1992-08-23|1992-08-12|1992-08-25|COLLECT COD|REG AIR|eposits. carefully regular depos|
+550|191|3|1|31|33826.89|0.04|0.02|N|O|1995-10-24|1995-09-27|1995-11-04|COLLECT COD|AIR|thely silent packages. unusual|
+551|24|9|1|8|7392.16|0.08|0.02|N|O|1995-07-29|1995-07-18|1995-08-02|NONE|REG AIR| wake quickly slyly pending platel|
+551|159|4|2|20|21183.00|0.00|0.07|N|O|1995-09-18|1995-08-25|1995-10-11|COLLECT COD|TRUCK|r ideas. final, even ideas hinder alongside|
+551|162|9|3|16|16994.56|0.07|0.06|N|O|1995-07-29|1995-08-19|1995-08-10|COLLECT COD|MAIL|y along the carefully ex|
+576|87|8|1|2|1974.16|0.07|0.01|N|O|1997-05-15|1997-06-30|1997-05-28|NONE|RAIL|ccounts along the ac|
+576|34|5|2|6|5604.18|0.06|0.05|N|O|1997-05-15|1997-07-26|1997-06-03|DELIVER IN PERSON|TRUCK|al deposits. slyly even sauternes a|
+576|37|3|3|6|5622.18|0.08|0.07|N|O|1997-08-28|1997-06-16|1997-09-25|DELIVER IN PERSON|FOB|ts. ironic multipliers |
+576|138|9|4|5|5190.65|0.03|0.07|N|O|1997-06-11|1997-06-17|1997-07-05|NONE|REG AIR|l foxes boost slyly. accounts af|
+577|26|5|1|25|23150.50|0.06|0.01|A|F|1995-04-09|1995-02-20|1995-05-09|TAKE BACK RETURN|AIR|ve slyly of the frets. careful|
+577|64|1|2|14|13496.84|0.08|0.03|R|F|1995-03-19|1995-02-25|1995-04-09|DELIVER IN PERSON|RAIL|l accounts wake deposits. ironic packa|
+578|156|7|1|40|42246.00|0.02|0.08|N|O|1997-02-10|1997-03-18|1997-02-11|NONE|SHIP|usly even platel|
+578|188|9|2|23|25028.14|0.05|0.08|N|O|1997-03-06|1997-03-03|1997-03-20|TAKE BACK RETURN|FOB|nstructions. ironic deposits|
+579|151|6|1|9|9460.35|0.00|0.05|N|O|1998-06-20|1998-04-28|1998-07-19|DELIVER IN PERSON|RAIL|e ironic, express deposits are furiously|
+579|33|4|2|39|36388.17|0.02|0.01|N|O|1998-06-21|1998-06-03|1998-06-26|COLLECT COD|REG AIR|ncies. furiously final r|
+579|60|5|3|6|5760.36|0.03|0.00|N|O|1998-04-24|1998-05-03|1998-05-08|TAKE BACK RETURN|TRUCK|ickly final requests-- bold accou|
+579|7|10|4|41|37187.00|0.04|0.05|N|O|1998-05-28|1998-05-01|1998-06-04|COLLECT COD|REG AIR|bold, express requests sublate slyly. blith|
+579|13|7|5|28|25564.28|0.00|0.03|N|O|1998-07-10|1998-05-24|1998-07-19|NONE|RAIL|ic ideas until th|
+579|167|6|6|5|5335.80|0.05|0.08|N|O|1998-05-02|1998-04-25|1998-05-05|COLLECT COD|REG AIR|refully silent ideas cajole furious|
+580|85|6|1|33|32507.64|0.03|0.05|N|O|1997-10-11|1997-09-19|1997-10-16|TAKE BACK RETURN|FOB|y express theodolites cajole carefully |
+580|174|5|2|31|33299.27|0.04|0.08|N|O|1997-10-04|1997-09-08|1997-10-15|COLLECT COD|FOB|ose alongside of the sl|
+580|185|6|3|19|20618.42|0.04|0.04|N|O|1997-07-23|1997-09-21|1997-08-15|NONE|FOB|mong the special packag|
+581|64|1|1|41|39526.46|0.09|0.07|N|O|1997-05-26|1997-04-06|1997-06-10|TAKE BACK RETURN|MAIL|nts. quickly|
+581|93|5|2|14|13903.26|0.06|0.08|N|O|1997-05-17|1997-04-14|1997-06-08|NONE|MAIL|. deposits s|
+581|101|6|3|49|49053.90|0.10|0.02|N|O|1997-02-27|1997-04-24|1997-03-10|TAKE BACK RETURN|MAIL|. slyly regular pinto beans acr|
+581|75|4|4|30|29252.10|0.10|0.08|N|O|1997-06-19|1997-05-21|1997-06-22|TAKE BACK RETURN|TRUCK| regular ideas grow furio|
+582|57|9|1|7|6699.35|0.07|0.00|N|O|1997-11-16|1997-11-29|1997-12-10|TAKE BACK RETURN|FOB|ithely unusual t|
+582|51|2|2|49|46601.45|0.05|0.03|N|O|1997-12-17|1998-01-12|1997-12-31|COLLECT COD|REG AIR|nts according to the furiously regular pin|
+582|141|4|3|42|43727.88|0.07|0.00|N|O|1997-11-15|1997-12-21|1997-12-03|COLLECT COD|SHIP|iously beside the silent de|
+582|168|9|4|36|38453.76|0.06|0.01|N|O|1997-12-09|1997-11-27|1997-12-26|TAKE BACK RETURN|SHIP|lar requests. quickly |
+583|145|6|1|1|1045.14|0.07|0.07|N|O|1997-06-17|1997-04-29|1997-06-28|NONE|TRUCK| regular, regular ideas. even, bra|
+583|120|4|2|47|47945.64|0.10|0.06|N|O|1997-07-14|1997-05-12|1997-08-11|DELIVER IN PERSON|AIR|nts are fluffily. furiously even re|
+583|130|5|3|34|35024.42|0.01|0.02|N|O|1997-05-11|1997-04-24|1997-06-03|DELIVER IN PERSON|MAIL|express req|
+583|142|1|4|33|34390.62|0.10|0.01|N|O|1997-05-28|1997-04-25|1997-06-24|NONE|AIR|kages cajole slyly across the|
+583|189|10|5|13|14159.34|0.04|0.06|N|O|1997-06-23|1997-05-29|1997-07-08|COLLECT COD|TRUCK|y sly theodolites. ironi|
+608|154|6|1|19|20028.85|0.08|0.06|N|O|1996-04-19|1996-05-02|1996-05-03|DELIVER IN PERSON|RAIL|ideas. the|
+608|198|1|2|40|43927.60|0.03|0.01|N|O|1996-05-21|1996-04-11|1996-06-02|NONE|AIR| alongside of the regular tithes. sly|
+609|66|5|1|21|20287.26|0.01|0.05|R|F|1994-08-24|1994-08-23|1994-08-27|DELIVER IN PERSON|FOB|de of the special warthogs. excu|
+610|111|8|1|49|49544.39|0.10|0.07|N|O|1995-08-29|1995-10-26|1995-09-12|TAKE BACK RETURN|SHIP|ular instruc|
+610|68|3|2|11|10648.66|0.07|0.08|N|O|1995-10-31|1995-10-25|1995-11-18|NONE|MAIL|blithely final |
+610|118|9|3|26|26470.86|0.09|0.04|N|O|1995-11-22|1995-09-09|1995-12-04|TAKE BACK RETURN|AIR|cross the furiously even theodolites sl|
+610|186|7|4|17|18465.06|0.03|0.03|N|O|1995-11-01|1995-10-30|1995-11-04|COLLECT COD|FOB|p quickly instead of the slyly pending foxe|
+610|146|7|5|39|40799.46|0.08|0.05|N|O|1995-10-30|1995-10-21|1995-11-11|TAKE BACK RETURN|REG AIR|counts. ironic warhorses are |
+610|95|7|6|5|4975.45|0.00|0.07|N|O|1995-08-11|1995-10-22|1995-08-26|TAKE BACK RETURN|FOB|n pinto beans. iro|
+610|190|1|7|27|29435.13|0.06|0.03|N|O|1995-09-02|1995-09-19|1995-09-15|NONE|REG AIR| ironic pinto beans haggle. blithe|
+611|17|7|1|39|35763.39|0.05|0.06|R|F|1993-05-06|1993-04-09|1993-05-22|TAKE BACK RETURN|SHIP|nto beans |
+611|81|2|2|1|981.08|0.08|0.07|R|F|1993-05-17|1993-02-26|1993-06-15|DELIVER IN PERSON|MAIL|ts. pending platelets aff|
+611|120|4|3|39|39784.68|0.09|0.02|A|F|1993-03-10|1993-03-10|1993-03-17|TAKE BACK RETURN|TRUCK|the evenly bold requests. furious|
+612|185|6|1|5|5425.90|0.07|0.00|R|F|1992-11-08|1992-11-20|1992-12-03|TAKE BACK RETURN|RAIL|structions. q|
+612|195|7|2|28|30665.32|0.07|0.06|R|F|1993-01-02|1992-12-11|1993-01-30|DELIVER IN PERSON|TRUCK|regular instructions affix bl|
+612|67|4|3|49|47385.94|0.00|0.08|A|F|1993-01-08|1992-11-25|1993-01-17|TAKE BACK RETURN|REG AIR|theodolite|
+612|39|5|4|28|26292.84|0.05|0.00|A|F|1992-11-12|1992-12-05|1992-12-02|TAKE BACK RETURN|REG AIR|lyly regular asym|
+612|88|9|5|1|988.08|0.08|0.04|R|F|1992-12-18|1992-12-13|1992-12-20|TAKE BACK RETURN|FOB| requests.|
+612|189|10|6|33|35942.94|0.10|0.03|R|F|1992-11-30|1992-12-01|1992-12-12|COLLECT COD|MAIL|bove the blithely even ideas. careful|
+613|91|5|1|17|16848.53|0.06|0.06|N|O|1995-09-23|1995-08-04|1995-10-15|NONE|SHIP|ar dependencie|
+613|79|7|2|6|5874.42|0.05|0.05|N|O|1995-08-05|1995-08-09|1995-08-08|TAKE BACK RETURN|MAIL|y ironic deposits eat |
+613|186|7|3|3|3258.54|0.03|0.01|N|O|1995-09-27|1995-09-11|1995-10-05|NONE|TRUCK|ccounts cajole. |
+613|159|10|4|7|7414.05|0.02|0.04|N|O|1995-09-07|1995-08-02|1995-09-16|DELIVER IN PERSON|MAIL|ously blithely final pinto beans. regula|
+614|195|8|1|21|22998.99|0.00|0.03|R|F|1993-03-29|1993-01-06|1993-04-16|TAKE BACK RETURN|TRUCK|arefully. slyly express packag|
+614|187|8|2|48|52184.64|0.07|0.07|A|F|1993-03-09|1993-01-19|1993-03-19|DELIVER IN PERSON|SHIP|riously special excuses haggle along the|
+614|167|2|3|43|45887.88|0.05|0.00|A|F|1993-03-07|1993-02-22|1993-03-18|DELIVER IN PERSON|SHIP| express accounts wake. slyly ironic ins|
+614|147|6|4|14|14659.96|0.04|0.06|A|F|1992-12-03|1993-02-14|1992-12-27|DELIVER IN PERSON|SHIP|ular packages haggle about the pack|
+614|196|8|5|30|32885.70|0.08|0.07|R|F|1993-01-16|1993-02-08|1993-02-12|TAKE BACK RETURN|FOB|tructions are f|
+614|137|8|6|48|49782.24|0.04|0.08|A|F|1992-12-14|1993-01-22|1993-01-11|NONE|TRUCK| regular platelets cajole quickly eve|
+615|105|6|1|36|36183.60|0.10|0.01|A|F|1992-06-01|1992-07-14|1992-06-27|NONE|FOB| packages. carefully final pinto bea|
+640|93|7|1|49|48661.41|0.09|0.02|R|F|1993-03-27|1993-04-17|1993-04-15|NONE|RAIL|s haggle slyly|
+640|1|4|2|40|36040.00|0.09|0.05|A|F|1993-05-11|1993-04-11|1993-05-15|COLLECT COD|TRUCK|oach according to the bol|
+640|180|8|3|22|23763.96|0.05|0.07|A|F|1993-05-07|1993-04-14|1993-05-21|TAKE BACK RETURN|TRUCK|osits across the slyly regular theodo|
+640|32|3|4|45|41941.35|0.07|0.07|R|F|1993-04-15|1993-04-23|1993-04-21|DELIVER IN PERSON|REG AIR|ong the qui|
+641|126|9|1|18|18470.16|0.01|0.08|R|F|1993-10-17|1993-10-11|1993-10-29|DELIVER IN PERSON|AIR|p blithely bold packages. quick|
+641|100|2|2|1|1000.10|0.09|0.02|R|F|1993-12-03|1993-10-28|1993-12-26|TAKE BACK RETURN|RAIL| nag across the regular foxes.|
+641|95|7|3|40|39803.60|0.05|0.06|R|F|1993-11-22|1993-10-20|1993-12-11|DELIVER IN PERSON|REG AIR|lets. furiously regular requests cajo|
+641|71|10|4|25|24276.75|0.03|0.02|A|F|1993-12-04|1993-11-18|1993-12-18|TAKE BACK RETURN|FOB|d, regular d|
+641|4|9|5|41|37064.00|0.07|0.04|R|F|1993-11-29|1993-10-27|1993-12-04|TAKE BACK RETURN|FOB| asymptotes are quickly. bol|
+642|54|5|1|26|24805.30|0.10|0.03|A|F|1994-04-16|1994-02-01|1994-04-27|COLLECT COD|REG AIR|quests according to the unu|
+643|13|3|1|28|25564.28|0.00|0.08|A|F|1995-04-13|1995-05-12|1995-04-14|TAKE BACK RETURN|TRUCK|ly regular requests nag sly|
+643|51|2|2|48|45650.40|0.01|0.02|N|O|1995-07-10|1995-06-07|1995-08-01|NONE|FOB|ly ironic accounts|
+643|163|2|3|23|24452.68|0.05|0.03|N|O|1995-07-09|1995-05-18|1995-07-31|COLLECT COD|RAIL|sits are carefully according to the e|
+643|45|4|4|39|36856.56|0.08|0.04|A|F|1995-06-08|1995-06-16|1995-06-13|COLLECT COD|RAIL| the pains. carefully s|
+643|190|1|5|47|51238.93|0.10|0.03|R|F|1995-04-05|1995-06-14|1995-04-26|DELIVER IN PERSON|RAIL|y against |
+644|134|10|1|46|47569.98|0.02|0.01|A|F|1992-05-20|1992-06-14|1992-06-14|DELIVER IN PERSON|RAIL| special requests was sometimes expre|
+644|130|3|2|11|11331.43|0.05|0.02|A|F|1992-08-20|1992-07-21|1992-09-11|TAKE BACK RETURN|TRUCK|ealthy pinto beans use carefu|
+644|101|6|3|44|44048.40|0.04|0.04|R|F|1992-08-17|1992-07-26|1992-08-20|COLLECT COD|REG AIR|iously ironic pinto beans. bold packa|
+644|80|8|4|7|6860.56|0.01|0.02|A|F|1992-05-18|1992-07-01|1992-06-07|COLLECT COD|RAIL| regular requests are blithely. slyly|
+644|50|1|5|23|21851.15|0.02|0.04|R|F|1992-07-31|1992-07-28|1992-08-13|DELIVER IN PERSON|TRUCK|uctions nag quickly alongside of t|
+644|85|6|6|33|32507.64|0.00|0.07|R|F|1992-08-26|1992-07-27|1992-08-28|NONE|AIR|ages sleep. bold, bo|
+644|51|9|7|38|36139.90|0.08|0.06|R|F|1992-05-17|1992-07-10|1992-06-06|TAKE BACK RETURN|MAIL| packages. blithely slow accounts nag quic|
+645|160|8|1|33|34985.28|0.01|0.02|A|F|1994-12-09|1995-02-21|1995-01-03|NONE|TRUCK|heodolites b|
+645|170|1|2|47|50297.99|0.07|0.05|R|F|1995-02-16|1995-02-15|1995-02-25|COLLECT COD|TRUCK|hely regular instructions alon|
+645|70|7|3|46|44623.22|0.10|0.01|A|F|1995-01-04|1995-02-21|1995-01-21|COLLECT COD|REG AIR| regular dependencies across the speci|
+645|96|9|4|49|48808.41|0.05|0.03|R|F|1995-01-24|1995-01-06|1995-02-17|NONE|TRUCK|y. slyly iron|
+645|5|8|5|43|38915.00|0.06|0.02|A|F|1995-02-12|1995-02-27|1995-03-06|TAKE BACK RETURN|REG AIR| furiously accounts. slyly|
+645|34|5|6|18|16812.54|0.10|0.08|A|F|1995-03-02|1995-02-08|1995-03-03|COLLECT COD|RAIL|ep. slyly even |
+645|28|9|7|9|8352.18|0.03|0.03|A|F|1994-12-25|1995-01-04|1995-01-15|COLLECT COD|REG AIR|special deposits. regular, final th|
+646|109|6|1|31|31282.10|0.00|0.05|R|F|1994-12-17|1995-02-16|1995-01-04|COLLECT COD|MAIL|ag furiousl|
+646|127|8|2|1|1027.12|0.07|0.01|A|F|1994-12-05|1995-01-07|1994-12-31|TAKE BACK RETURN|MAIL|t blithely regular deposits. quic|
+646|30|9|3|24|22320.72|0.06|0.02|A|F|1995-02-20|1994-12-30|1995-03-16|TAKE BACK RETURN|TRUCK|regular accounts haggle dog|
+646|99|2|4|34|33969.06|0.01|0.00|R|F|1994-12-28|1994-12-27|1994-12-31|COLLECT COD|SHIP|slow accounts. fluffily idle instructions|
+646|90|1|5|17|16831.53|0.04|0.01|A|F|1994-12-31|1994-12-26|1995-01-01|DELIVER IN PERSON|REG AIR|inal packages haggle carefully |
+646|115|2|6|40|40604.40|0.10|0.01|R|F|1995-01-01|1995-01-13|1995-01-11|COLLECT COD|TRUCK|ronic packages sleep across th|
+647|17|1|1|41|37597.41|0.08|0.08|N|O|1997-11-19|1997-09-24|1997-12-15|COLLECT COD|REG AIR|r instructions. quickly unusu|
+647|113|10|2|5|5065.55|0.10|0.00|N|O|1997-09-25|1997-09-22|1997-10-25|TAKE BACK RETURN|AIR|ly express packages haggle caref|
+647|153|8|3|15|15797.25|0.08|0.00|N|O|1997-09-23|1997-10-09|1997-10-21|NONE|MAIL|ve the even, bold foxes sleep |
+672|173|4|1|41|43999.97|0.06|0.06|R|F|1994-06-20|1994-07-03|1994-06-22|COLLECT COD|REG AIR| dependencies in|
+672|190|1|2|9|9811.71|0.03|0.04|R|F|1994-06-25|1994-06-06|1994-07-19|TAKE BACK RETURN|TRUCK|haggle carefully carefully reg|
+672|143|2|3|35|36509.90|0.02|0.01|R|F|1994-07-13|1994-06-04|1994-07-14|COLLECT COD|RAIL| dependencies haggle quickly. theo|
+673|71|10|1|22|21363.54|0.03|0.02|R|F|1994-03-15|1994-04-27|1994-03-29|TAKE BACK RETURN|TRUCK| the regular, even requests. carefully fin|
+674|102|5|1|23|23048.30|0.06|0.07|A|F|1992-10-25|1992-10-15|1992-11-03|COLLECT COD|SHIP|ve the quickly even deposits. blithe|
+674|59|4|2|4|3836.20|0.02|0.07|R|F|1992-10-05|1992-11-22|1992-10-22|NONE|RAIL|ly express pinto beans sleep car|
+675|157|9|1|1|1057.15|0.04|0.08|N|O|1997-11-27|1997-09-30|1997-12-12|DELIVER IN PERSON|REG AIR|ide of the slyly regular packages. unus|
+675|137|3|2|35|36299.55|0.08|0.07|N|O|1997-08-19|1997-10-16|1997-09-17|DELIVER IN PERSON|REG AIR|s. furiously expre|
+675|176|6|3|34|36589.78|0.10|0.04|N|O|1997-11-17|1997-10-07|1997-11-27|NONE|FOB|y final accounts unwind around the |
+675|100|4|4|15|15001.50|0.09|0.05|N|O|1997-10-18|1997-09-28|1997-11-13|COLLECT COD|TRUCK|posits after the furio|
+675|5|8|5|46|41630.00|0.09|0.05|N|O|1997-09-18|1997-10-14|1997-10-01|DELIVER IN PERSON|AIR| deposits along the express foxes |
+676|51|3|1|9|8559.45|0.09|0.02|N|O|1997-04-03|1997-02-02|1997-04-08|COLLECT COD|REG AIR|aintain sl|
+676|78|6|2|20|19561.40|0.07|0.07|N|O|1997-02-02|1997-02-01|1997-02-11|NONE|REG AIR|riously around the blithely |
+676|163|4|3|35|37210.60|0.05|0.01|N|O|1996-12-30|1997-01-13|1997-01-19|DELIVER IN PERSON|RAIL|into beans. blithe|
+676|73|1|4|24|23353.68|0.01|0.06|N|O|1997-02-05|1997-01-16|1997-03-07|TAKE BACK RETURN|TRUCK|ress, regular dep|
+676|166|1|5|31|33050.96|0.01|0.06|N|O|1997-02-06|1997-02-28|1997-03-08|COLLECT COD|TRUCK|ial deposits cajo|
+676|76|7|6|33|32210.31|0.09|0.05|N|O|1997-03-02|1997-02-22|1997-03-19|TAKE BACK RETURN|TRUCK|as wake slyly furiously close pinto b|
+676|143|6|7|11|11474.54|0.07|0.02|N|O|1997-03-09|1997-03-06|1997-03-31|TAKE BACK RETURN|MAIL|he final acco|
+677|59|7|1|32|30689.60|0.04|0.08|R|F|1994-01-06|1994-01-31|1994-02-02|NONE|RAIL|slyly final|
+677|168|9|2|39|41658.24|0.00|0.07|R|F|1993-12-19|1994-02-11|1994-01-05|TAKE BACK RETURN|SHIP|ges. furiously regular packages use |
+677|24|5|3|46|42504.92|0.01|0.02|R|F|1993-12-02|1994-02-12|1993-12-06|COLLECT COD|RAIL|ng theodolites. furiously unusual theodo|
+677|148|7|4|1|1048.14|0.06|0.05|R|F|1993-12-01|1994-01-14|1993-12-26|DELIVER IN PERSON|MAIL|ly. regular |
+677|150|9|5|25|26253.75|0.00|0.05|A|F|1994-03-12|1994-02-02|1994-03-28|DELIVER IN PERSON|AIR| packages integrate blithely|
+678|146|7|1|20|20922.80|0.05|0.08|R|F|1993-06-21|1993-04-07|1993-07-10|TAKE BACK RETURN|MAIL|furiously express excuses. foxes eat fu|
+678|37|3|2|22|20614.66|0.01|0.02|A|F|1993-05-10|1993-04-29|1993-06-08|NONE|REG AIR|de of the carefully even requests. bl|
+678|143|10|3|16|16690.24|0.06|0.02|R|F|1993-03-20|1993-04-13|1993-04-16|DELIVER IN PERSON|REG AIR|equests cajole around the carefully regular|
+678|199|10|4|48|52761.12|0.08|0.08|R|F|1993-02-28|1993-04-04|1993-03-24|NONE|REG AIR|ithely. slyly express foxes|
+678|98|9|5|16|15969.44|0.06|0.04|R|F|1993-03-09|1993-04-18|1993-04-07|NONE|AIR| about the |
+678|43|4|6|11|10373.44|0.09|0.00|R|F|1993-04-28|1993-05-16|1993-05-11|COLLECT COD|TRUCK|ess deposits dazzle f|
+679|192|3|1|9|9829.71|0.09|0.00|N|O|1995-12-20|1996-01-27|1996-01-07|COLLECT COD|REG AIR|leep slyly. entici|
+704|190|1|1|40|43607.60|0.05|0.05|N|O|1997-01-30|1997-01-10|1997-02-20|COLLECT COD|AIR|ggle quickly. r|
+704|4|5|2|14|12656.00|0.07|0.08|N|O|1997-02-02|1996-12-26|1997-02-19|DELIVER IN PERSON|REG AIR|ve the quickly final forges. furiously p|
+705|189|10|1|46|50102.28|0.05|0.06|N|O|1997-04-18|1997-05-06|1997-05-05|DELIVER IN PERSON|SHIP|ss deposits. ironic packa|
+705|117|7|2|35|35598.85|0.10|0.04|N|O|1997-03-25|1997-03-20|1997-04-23|TAKE BACK RETURN|FOB|carefully ironic accounts|
+706|197|9|1|23|25235.37|0.05|0.00|N|O|1995-12-06|1995-12-02|1995-12-16|COLLECT COD|SHIP|ckey players. requests above the|
+707|155|6|1|34|35875.10|0.01|0.02|R|F|1994-12-08|1995-01-15|1995-01-02|NONE|RAIL| dependencies|
+707|43|10|2|22|20746.88|0.00|0.06|A|F|1995-01-12|1994-12-28|1995-01-16|DELIVER IN PERSON|REG AIR| kindle ironically|
+708|124|7|1|3|3072.36|0.05|0.02|N|O|1998-10-09|1998-09-22|1998-11-07|COLLECT COD|FOB|e slyly pending foxes. |
+708|180|1|2|19|20523.42|0.06|0.00|N|O|1998-10-28|1998-09-23|1998-11-25|COLLECT COD|SHIP| requests. even, thin ideas|
+708|122|5|3|33|33729.96|0.09|0.06|N|O|1998-09-10|1998-09-20|1998-09-22|COLLECT COD|RAIL|s boost carefully ruthless theodolites. f|
+708|56|7|4|5|4780.25|0.07|0.07|N|O|1998-07-22|1998-08-15|1998-07-28|TAKE BACK RETURN|REG AIR|c pinto beans nag after the account|
+708|143|2|5|36|37553.04|0.08|0.01|N|O|1998-07-16|1998-09-04|1998-08-11|NONE|SHIP|ests. even, regular hockey p|
+708|23|6|6|7|6461.14|0.10|0.03|N|O|1998-08-16|1998-08-15|1998-09-10|COLLECT COD|REG AIR|lly express ac|
+709|87|8|1|7|6909.56|0.00|0.00|N|O|1998-06-14|1998-06-08|1998-06-18|TAKE BACK RETURN|RAIL| special orbits cajole |
+709|198|10|2|15|16472.85|0.08|0.00|N|O|1998-07-10|1998-06-26|1998-08-09|NONE|RAIL|ily regular deposits. sauternes was accor|
+709|169|8|3|10|10691.60|0.01|0.02|N|O|1998-06-04|1998-06-30|1998-06-11|NONE|REG AIR|ts cajole boldly |
+709|108|9|4|40|40324.00|0.10|0.08|N|O|1998-08-12|1998-06-20|1998-08-20|DELIVER IN PERSON|RAIL|ggle fluffily carefully ironic|
+710|163|8|1|47|49968.52|0.06|0.08|A|F|1993-01-18|1993-03-24|1993-01-24|TAKE BACK RETURN|MAIL|usual ideas into th|
+710|193|4|2|38|41541.22|0.07|0.02|R|F|1993-04-18|1993-03-12|1993-05-15|COLLECT COD|FOB|sts boost fluffily aft|
+710|139|5|3|7|7273.91|0.04|0.06|R|F|1993-01-20|1993-03-28|1993-02-15|TAKE BACK RETURN|REG AIR|xpress, special ideas. bl|
+710|90|1|4|25|24752.25|0.00|0.05|R|F|1993-03-31|1993-02-05|1993-04-22|COLLECT COD|FOB|eas detect do|
+710|186|7|5|12|13034.16|0.01|0.02|A|F|1993-02-18|1993-02-27|1993-03-07|DELIVER IN PERSON|MAIL|ions. slyly express theodolites al|
+710|114|4|6|21|21296.31|0.04|0.06|R|F|1993-03-22|1993-03-05|1993-03-27|DELIVER IN PERSON|SHIP|es. furiously p|
+710|160|2|7|46|48767.36|0.03|0.07|R|F|1993-04-16|1993-03-27|1993-05-05|COLLECT COD|MAIL|ges use; blithely pending excuses inte|
+711|146|3|1|2|2092.28|0.10|0.04|R|F|1993-12-01|1993-12-09|1993-12-16|DELIVER IN PERSON|REG AIR|ely across t|
+711|103|8|2|27|27083.70|0.00|0.08|A|F|1993-10-02|1993-10-26|1993-10-08|DELIVER IN PERSON|MAIL|slyly. ironic asy|
+711|128|7|3|46|47293.52|0.10|0.00|R|F|1993-12-26|1993-11-19|1994-01-21|TAKE BACK RETURN|MAIL|deposits. permanen|
+711|128|9|4|20|20562.40|0.09|0.00|A|F|1994-01-17|1993-11-10|1994-01-31|DELIVER IN PERSON|TRUCK|kly regular acco|
+736|158|9|1|46|48674.90|0.05|0.01|N|O|1998-07-16|1998-09-01|1998-08-09|NONE|AIR|uctions cajole|
+736|80|1|2|23|22541.84|0.02|0.05|N|O|1998-10-08|1998-08-27|1998-10-19|TAKE BACK RETURN|AIR|k accounts are carefully|
+736|57|9|3|13|12441.65|0.00|0.03|N|O|1998-08-16|1998-07-26|1998-08-19|DELIVER IN PERSON|FOB|st furiously among the |
+736|98|2|4|14|13973.26|0.06|0.04|N|O|1998-10-04|1998-08-14|1998-10-16|COLLECT COD|REG AIR|nstructions.|
+736|169|6|5|32|34213.12|0.04|0.03|N|O|1998-07-30|1998-08-22|1998-08-12|DELIVER IN PERSON|RAIL|iously final accoun|
+737|182|3|1|12|12986.16|0.01|0.01|R|F|1992-04-28|1992-06-30|1992-05-08|COLLECT COD|RAIL|posits after the slyly bold du|
+738|198|1|1|34|37338.46|0.00|0.06|R|F|1993-06-09|1993-04-15|1993-07-09|TAKE BACK RETURN|TRUCK|s against the ironic exc|
+738|188|9|2|4|4352.72|0.00|0.03|A|F|1993-06-20|1993-04-08|1993-07-09|NONE|AIR|ar packages. fluffily bo|
+738|170|1|3|23|24613.91|0.04|0.08|A|F|1993-03-17|1993-04-02|1993-04-05|TAKE BACK RETURN|SHIP|nic, final excuses promise quickly regula|
+738|141|10|4|12|12493.68|0.04|0.08|A|F|1993-06-16|1993-05-05|1993-06-22|NONE|SHIP|ove the slyly regular p|
+738|175|4|5|30|32255.10|0.02|0.00|A|F|1993-06-12|1993-05-29|1993-06-25|NONE|AIR|ecial instructions haggle blithely regula|
+739|85|6|1|28|27582.24|0.00|0.03|N|O|1998-06-03|1998-08-04|1998-06-29|TAKE BACK RETURN|RAIL|elets about the pe|
+739|4|7|2|50|45200.00|0.07|0.06|N|O|1998-08-26|1998-07-16|1998-09-02|COLLECT COD|MAIL|ndencies. blith|
+739|49|2|3|12|11388.48|0.05|0.00|N|O|1998-08-20|1998-07-24|1998-08-22|NONE|MAIL|le slyly along the close i|
+739|44|3|4|47|44369.88|0.09|0.07|N|O|1998-08-12|1998-07-09|1998-08-28|NONE|REG AIR|deas according to the theodolites sn|
+739|188|9|5|30|32645.40|0.07|0.06|N|O|1998-06-19|1998-08-26|1998-07-02|DELIVER IN PERSON|REG AIR|above the even deposits. ironic requests|
+740|2|9|1|22|19844.00|0.10|0.02|N|O|1995-07-24|1995-09-11|1995-08-11|TAKE BACK RETURN|FOB|odolites cajole ironic, pending instruc|
+740|66|1|2|35|33812.10|0.00|0.00|N|O|1995-09-06|1995-08-22|1995-10-02|NONE|TRUCK|p quickly. fu|
+740|199|10|3|29|31876.51|0.06|0.05|N|O|1995-10-26|1995-09-17|1995-10-29|DELIVER IN PERSON|FOB|ntly bold pinto beans sleep quickl|
+741|187|8|1|25|27179.50|0.03|0.06|N|O|1998-07-15|1998-08-27|1998-08-12|DELIVER IN PERSON|MAIL|accounts. blithely bold pa|
+741|91|4|2|22|21803.98|0.09|0.01|N|O|1998-09-07|1998-09-28|1998-09-12|COLLECT COD|AIR|ven deposits about the regular, ironi|
+742|102|3|1|46|46096.60|0.04|0.08|A|F|1995-03-12|1995-03-20|1995-03-16|TAKE BACK RETURN|SHIP|e slyly bold deposits cajole according to|
+742|96|8|2|15|14941.35|0.08|0.05|A|F|1995-02-26|1995-03-20|1995-03-03|NONE|SHIP|blithely unusual pinto|
+742|102|5|3|24|24050.40|0.08|0.08|A|F|1995-02-12|1995-03-12|1995-02-14|DELIVER IN PERSON|SHIP|affix slyly. furiously i|
+742|192|4|4|16|17475.04|0.01|0.05|A|F|1995-01-15|1995-02-25|1995-01-24|COLLECT COD|AIR|eodolites haggle carefully regul|
+742|101|4|5|48|48052.80|0.09|0.08|R|F|1995-03-24|1995-01-23|1995-04-08|TAKE BACK RETURN|TRUCK| platelets |
+742|192|6|6|49|53517.31|0.02|0.07|A|F|1995-01-13|1995-02-13|1995-01-26|TAKE BACK RETURN|RAIL| carefully bold foxes sle|
+743|192|5|1|21|22935.99|0.01|0.04|N|O|1996-10-26|1996-11-05|1996-11-11|COLLECT COD|MAIL|d requests. packages afte|
+768|196|7|1|39|42751.41|0.06|0.08|N|O|1996-09-25|1996-10-27|1996-10-20|NONE|SHIP|out the ironic|
+768|18|9|2|2|1836.02|0.00|0.04|N|O|1996-11-13|1996-10-03|1996-11-25|DELIVER IN PERSON|SHIP|ular courts. slyly dogged accou|
+768|6|1|3|30|27180.00|0.06|0.05|N|O|1996-09-22|1996-11-03|1996-10-13|NONE|MAIL| furiously fluffy pinto beans haggle along|
+768|25|8|4|37|34225.74|0.10|0.00|N|O|1996-10-02|1996-09-23|1996-10-14|TAKE BACK RETURN|REG AIR|ending requests across the quickly|
+768|47|10|5|47|44510.88|0.06|0.05|N|O|1996-11-28|1996-10-30|1996-12-12|NONE|TRUCK|foxes. slyly ironic deposits a|
+768|112|9|6|43|43520.73|0.10|0.06|N|O|1996-09-22|1996-11-03|1996-10-22|TAKE BACK RETURN|AIR|sual ideas wake quickly|
+768|49|10|7|33|31318.32|0.01|0.04|N|O|1996-09-06|1996-09-29|1996-10-01|COLLECT COD|RAIL|sly ironic instructions. excuses can hagg|
+769|176|6|1|36|38742.12|0.02|0.02|A|F|1993-10-01|1993-08-07|1993-10-15|NONE|AIR|es. furiously iro|
+769|160|8|2|4|4240.64|0.01|0.04|R|F|1993-06-25|1993-08-12|1993-07-15|DELIVER IN PERSON|FOB| ideas. even|
+770|181|2|1|39|42166.02|0.09|0.06|N|O|1998-07-19|1998-08-09|1998-08-04|NONE|REG AIR|osits. foxes cajole |
+770|54|2|2|25|23851.25|0.03|0.02|N|O|1998-05-26|1998-07-23|1998-06-04|TAKE BACK RETURN|AIR| deposits dazzle fluffily alongside of |
+771|7|4|1|12|10884.00|0.10|0.08|N|O|1995-07-18|1995-08-02|1995-08-07|COLLECT COD|TRUCK|carefully. pending in|
+771|161|10|2|38|40324.08|0.03|0.08|N|O|1995-07-22|1995-09-10|1995-07-29|TAKE BACK RETURN|REG AIR| quickly final requests are final packages.|
+771|7|8|3|14|12698.00|0.02|0.05|N|O|1995-07-31|1995-08-13|1995-08-07|DELIVER IN PERSON|AIR|r, final packages are slyly iro|
+771|42|3|4|7|6594.28|0.06|0.02|N|O|1995-06-18|1995-08-31|1995-06-20|NONE|REG AIR|theodolites after the fluffily express |
+771|78|6|5|13|12714.91|0.09|0.01|N|O|1995-08-10|1995-08-21|1995-08-30|NONE|FOB|packages affix slyly about the quickly |
+771|82|3|6|23|22587.84|0.08|0.03|N|O|1995-06-19|1995-09-07|1995-07-09|COLLECT COD|FOB|cajole besides the quickly ironic pin|
+772|53|5|1|35|33356.75|0.10|0.06|R|F|1993-07-05|1993-06-05|1993-08-02|NONE|SHIP|kly thin packages wake slowly|
+772|84|5|2|10|9840.80|0.05|0.01|R|F|1993-05-20|1993-05-19|1993-06-15|DELIVER IN PERSON|MAIL| deposits cajole carefully instructions. t|
+772|86|7|3|35|34512.80|0.03|0.04|R|F|1993-04-18|1993-06-13|1993-05-01|COLLECT COD|MAIL|ng ideas. special packages haggle alon|
+772|180|8|4|10|10801.80|0.08|0.02|A|F|1993-05-17|1993-06-09|1993-05-29|COLLECT COD|AIR|o the furiously final deposits. furi|
+772|54|5|5|42|40070.10|0.02|0.07|A|F|1993-06-09|1993-07-16|1993-06-12|DELIVER IN PERSON|MAIL| express foxes abo|
+773|100|1|1|5|5000.50|0.06|0.04|A|F|1993-11-21|1993-12-19|1993-12-21|COLLECT COD|MAIL|ar requests. regular, thin packages u|
+773|11|5|2|31|28241.31|0.02|0.06|A|F|1993-12-30|1993-11-02|1994-01-01|TAKE BACK RETURN|MAIL|e slyly unusual deposit|
+773|151|3|3|39|40994.85|0.06|0.05|A|F|1994-01-04|1993-12-23|1994-01-26|DELIVER IN PERSON|FOB|quickly eve|
+773|29|8|4|28|26012.56|0.10|0.06|R|F|1994-01-19|1993-11-05|1994-01-23|NONE|TRUCK|he furiously slow deposits.|
+773|134|5|5|9|9307.17|0.09|0.02|R|F|1993-10-09|1993-12-25|1993-11-04|TAKE BACK RETURN|FOB|ent orbits haggle fluffily after the |
+773|40|1|6|43|40421.72|0.07|0.03|A|F|1993-11-06|1993-11-20|1993-11-08|TAKE BACK RETURN|SHIP|furiously bold dependencies. blithel|
+774|183|4|1|49|53075.82|0.08|0.03|N|O|1995-12-06|1996-01-07|1995-12-14|DELIVER IN PERSON|SHIP|ess accounts are carefully |
+774|17|4|2|3|2751.03|0.10|0.06|N|O|1996-02-13|1996-01-14|1996-03-04|COLLECT COD|FOB| slyly even courts nag blith|
+774|148|7|3|34|35636.76|0.02|0.07|N|O|1996-03-16|1996-01-03|1996-03-22|COLLECT COD|FOB|lar excuses are furiously final instr|
+774|15|6|4|8|7320.08|0.00|0.02|N|O|1996-01-24|1996-01-15|1996-02-13|COLLECT COD|RAIL|ully ironic requests c|
+774|177|5|5|44|47395.48|0.09|0.07|N|O|1996-02-29|1996-01-16|1996-03-06|NONE|REG AIR|s according to the deposits unwind ca|
+774|120|1|6|2|2040.24|0.07|0.03|N|O|1995-12-11|1996-02-10|1995-12-14|TAKE BACK RETURN|SHIP|accounts; slyly regular|
+775|32|3|1|16|14912.48|0.10|0.06|N|F|1995-05-23|1995-05-07|1995-06-19|NONE|TRUCK|un quickly slyly|
+775|174|2|2|21|22557.57|0.01|0.06|R|F|1995-05-01|1995-06-02|1995-05-13|DELIVER IN PERSON|FOB| quickly sile|
+775|108|5|3|20|20162.00|0.01|0.08|N|F|1995-06-17|1995-05-22|1995-07-13|COLLECT COD|AIR|en dependencies nag slowly |
+800|72|1|1|38|36938.66|0.00|0.05|N|O|1998-07-21|1998-09-25|1998-08-07|TAKE BACK RETURN|TRUCK|according to the bold, final dependencies |
+800|85|6|2|21|20686.68|0.04|0.05|N|O|1998-07-23|1998-10-01|1998-08-20|TAKE BACK RETURN|RAIL|ckly even requests after the carefully r|
+800|176|5|3|26|27980.42|0.01|0.02|N|O|1998-07-23|1998-10-08|1998-07-25|DELIVER IN PERSON|FOB|bove the pending requests.|
+801|6|3|1|13|11778.00|0.10|0.02|R|F|1992-04-25|1992-04-24|1992-05-16|COLLECT COD|RAIL|s are fluffily stealthily expres|
+801|95|8|2|21|20896.89|0.05|0.02|A|F|1992-03-14|1992-04-01|1992-04-05|COLLECT COD|AIR|wake silently furiously idle deposits. |
+801|3|4|3|21|18963.00|0.05|0.03|A|F|1992-04-25|1992-03-20|1992-05-04|COLLECT COD|REG AIR|cial, special packages.|
+801|164|9|4|12|12769.92|0.08|0.04|A|F|1992-06-06|1992-04-14|1992-06-12|TAKE BACK RETURN|RAIL|s. ironic pinto b|
+801|74|2|5|45|43833.15|0.01|0.06|R|F|1992-03-22|1992-03-22|1992-03-25|COLLECT COD|REG AIR| even asymptotes|
+801|122|7|6|10|10221.20|0.08|0.01|A|F|1992-06-05|1992-05-15|1992-06-21|DELIVER IN PERSON|MAIL|al accounts. carefully regular foxes wake|
+801|26|5|7|11|10186.22|0.01|0.03|A|F|1992-05-09|1992-04-19|1992-05-15|DELIVER IN PERSON|REG AIR|y special pinto beans cajole |
+802|143|6|1|40|41725.60|0.08|0.08|A|F|1995-01-07|1995-04-03|1995-01-23|DELIVER IN PERSON|RAIL|y bold accou|
+802|133|4|2|34|35126.42|0.08|0.06|A|F|1995-03-01|1995-03-15|1995-03-12|COLLECT COD|AIR|instructions cajole carefully. quietl|
+802|131|2|3|44|45369.72|0.07|0.04|R|F|1995-01-09|1995-02-04|1995-01-18|TAKE BACK RETURN|SHIP|rmanently idly special requ|
+802|157|2|4|18|19028.70|0.09|0.02|R|F|1995-03-06|1995-02-07|1995-03-19|TAKE BACK RETURN|RAIL|y regular requests engage furiously final d|
+802|132|3|5|19|19610.47|0.08|0.06|A|F|1995-04-01|1995-02-20|1995-04-23|DELIVER IN PERSON|REG AIR|old, furious|
+803|54|9|1|8|7632.40|0.07|0.01|N|O|1997-08-04|1997-06-19|1997-08-12|NONE|SHIP|ronic theodo|
+803|99|10|2|21|20980.89|0.08|0.06|N|O|1997-08-25|1997-06-30|1997-09-10|TAKE BACK RETURN|AIR|ironic packages cajole slyly. un|
+804|126|7|1|30|30783.60|0.08|0.04|A|F|1993-03-29|1993-05-07|1993-04-14|COLLECT COD|REG AIR|ehind the quietly regular pac|
+804|199|3|2|2|2198.38|0.02|0.00|A|F|1993-06-23|1993-04-30|1993-06-25|NONE|TRUCK|slyly silent |
+804|76|5|3|44|42947.08|0.04|0.05|R|F|1993-07-06|1993-04-13|1993-07-28|DELIVER IN PERSON|TRUCK|ly final deposits? special |
+804|38|9|4|21|19698.63|0.01|0.00|A|F|1993-04-12|1993-06-06|1993-04-20|DELIVER IN PERSON|RAIL|ular, ironic foxes. quickly even accounts|
+805|198|10|1|25|27454.75|0.07|0.06|N|O|1995-08-05|1995-09-30|1995-08-06|NONE|AIR|ide of the pending, sly requests. quickly f|
+805|57|5|2|29|27754.45|0.07|0.01|N|O|1995-08-24|1995-08-15|1995-09-16|TAKE BACK RETURN|AIR|dolites according to the slyly f|
+805|47|8|3|12|11364.48|0.01|0.06|N|O|1995-07-13|1995-09-27|1995-08-02|TAKE BACK RETURN|REG AIR| regular foxes. furio|
+805|76|6|4|26|25377.82|0.08|0.07|N|O|1995-08-28|1995-09-24|1995-09-11|TAKE BACK RETURN|RAIL|. ironic deposits sleep across |
+806|105|2|1|1|1005.10|0.04|0.07|N|O|1996-07-14|1996-09-12|1996-07-25|COLLECT COD|RAIL|ar accounts? pending, pending foxes a|
+806|160|5|2|22|23323.52|0.08|0.02|N|O|1996-10-03|1996-08-11|1996-10-20|DELIVER IN PERSON|REG AIR|fily pending |
+806|91|3|3|4|3964.36|0.04|0.03|N|O|1996-08-09|1996-09-18|1996-08-13|COLLECT COD|TRUCK|eans. quickly ironic ideas |
+807|117|7|1|49|49838.39|0.00|0.00|R|F|1993-12-05|1994-01-13|1993-12-25|COLLECT COD|REG AIR| furiously according to the un|
+807|155|10|2|49|51702.35|0.01|0.06|A|F|1994-01-17|1994-01-24|1994-01-22|COLLECT COD|TRUCK|y regular requests haggle.|
+807|181|2|3|48|51896.64|0.07|0.07|A|F|1994-01-08|1994-02-02|1994-01-15|DELIVER IN PERSON|SHIP|kly across the f|
+807|80|1|4|10|9800.80|0.09|0.00|R|F|1994-01-19|1994-02-12|1994-01-28|NONE|TRUCK|furiously final depths sleep a|
+807|143|6|5|30|31294.20|0.02|0.01|R|F|1994-01-19|1994-01-09|1994-01-27|NONE|RAIL|cial accoun|
+807|12|2|6|11|10032.11|0.02|0.04|R|F|1994-03-25|1994-01-26|1994-04-14|NONE|FOB|unts above the slyly final ex|
+807|1|6|7|19|17119.00|0.08|0.05|A|F|1994-02-10|1994-02-20|1994-03-06|NONE|SHIP|ns haggle quickly across the furi|
+832|103|6|1|45|45139.50|0.01|0.02|A|F|1992-05-08|1992-06-06|1992-06-04|COLLECT COD|MAIL|foxes engage slyly alon|
+832|48|1|2|24|22752.96|0.05|0.06|A|F|1992-06-15|1992-07-14|1992-06-17|NONE|TRUCK|ully. carefully speci|
+833|54|5|1|1|954.05|0.04|0.04|R|F|1994-04-26|1994-04-05|1994-04-29|COLLECT COD|MAIL|ffily ironic theodolites|
+833|112|6|2|38|38460.18|0.05|0.05|A|F|1994-04-05|1994-04-21|1994-05-01|COLLECT COD|TRUCK| platelets promise furiously. |
+833|162|7|3|9|9559.44|0.05|0.07|A|F|1994-02-28|1994-04-26|1994-03-20|TAKE BACK RETURN|FOB|ecial, even requests. even, bold instructi|
+834|145|2|1|36|37625.04|0.06|0.04|R|F|1994-06-28|1994-07-25|1994-07-07|TAKE BACK RETURN|SHIP|ccounts haggle after the furiously |
+834|7|2|2|11|9977.00|0.03|0.00|A|F|1994-09-18|1994-08-03|1994-10-02|DELIVER IN PERSON|TRUCK|inst the regular packa|
+835|107|2|1|33|33234.30|0.09|0.06|N|O|1995-11-01|1995-12-02|1995-11-24|DELIVER IN PERSON|RAIL|onic instructions among the carefully iro|
+835|185|6|2|28|30385.04|0.02|0.02|N|O|1995-12-27|1995-12-11|1996-01-21|NONE|SHIP| fluffily furious pinto beans|
+836|188|9|1|6|6529.08|0.09|0.03|N|O|1996-12-09|1997-01-31|1996-12-29|COLLECT COD|TRUCK|fully bold theodolites are daringly across|
+836|84|5|2|18|17713.44|0.03|0.05|N|O|1997-02-27|1997-02-11|1997-03-22|NONE|REG AIR|y pending packages use alon|
+836|141|8|3|46|47892.44|0.05|0.07|N|O|1997-03-21|1997-02-06|1997-04-05|NONE|REG AIR|boldly final pinto beans haggle furiously|
+837|57|5|1|39|37324.95|0.03|0.08|A|F|1994-07-22|1994-08-10|1994-08-11|NONE|RAIL|ecial pinto bea|
+837|88|9|2|24|23713.92|0.08|0.00|R|F|1994-06-27|1994-09-02|1994-07-27|DELIVER IN PERSON|FOB|p carefully. theodolites use. bold courts a|
+838|134|10|1|20|20682.60|0.10|0.07|N|O|1998-04-11|1998-03-25|1998-04-19|COLLECT COD|TRUCK| furiously final ideas. slow, bold |
+838|29|10|2|27|25083.54|0.05|0.07|N|O|1998-02-15|1998-04-03|1998-02-20|DELIVER IN PERSON|SHIP| pending pinto beans haggle about t|
+838|95|7|3|23|22887.07|0.10|0.07|N|O|1998-03-26|1998-04-17|1998-04-02|COLLECT COD|AIR|ets haggle furiously furiously regular r|
+838|44|5|4|18|16992.72|0.09|0.00|N|O|1998-03-28|1998-04-06|1998-03-31|TAKE BACK RETURN|AIR|hely unusual foxes. furio|
+839|158|10|1|23|24337.45|0.07|0.02|N|O|1995-10-17|1995-11-03|1995-11-04|COLLECT COD|AIR|ng ideas haggle accord|
+839|189|10|2|47|51191.46|0.08|0.00|N|O|1995-10-17|1995-11-06|1995-11-10|NONE|AIR|refully final excuses about |
+864|130|5|1|34|35024.42|0.03|0.04|N|O|1997-12-16|1997-10-23|1998-01-12|TAKE BACK RETURN|SHIP|gside of the furiously special|
+864|98|1|2|7|6986.63|0.01|0.02|N|O|1997-11-13|1997-10-07|1997-12-13|TAKE BACK RETURN|MAIL|ven requests should sleep along |
+864|80|10|3|34|33322.72|0.03|0.00|N|O|1997-09-14|1997-11-04|1997-09-21|TAKE BACK RETURN|REG AIR|to the furiously ironic platelets! |
+865|198|10|1|16|17571.04|0.07|0.03|R|F|1993-08-24|1993-06-26|1993-08-28|TAKE BACK RETURN|TRUCK|y even accounts. quickly bold decoys|
+865|20|7|2|3|2760.06|0.02|0.05|A|F|1993-07-17|1993-07-14|1993-08-01|NONE|MAIL|fully regular the|
+865|87|8|3|15|14806.20|0.00|0.06|R|F|1993-07-05|1993-06-25|1993-07-26|NONE|SHIP| deposits sleep quickl|
+865|169|4|4|34|36351.44|0.09|0.06|A|F|1993-05-09|1993-07-28|1993-05-18|DELIVER IN PERSON|REG AIR|furiously fluffily unusual account|
+866|136|7|1|5|5180.65|0.08|0.00|R|F|1993-01-22|1993-01-14|1993-02-07|TAKE BACK RETURN|AIR|tegrate fluffily. carefully f|
+867|139|10|1|7|7273.91|0.04|0.07|A|F|1994-02-19|1993-12-25|1994-02-25|DELIVER IN PERSON|TRUCK|pendencies-- slyly unusual packages hagg|
+868|168|9|1|8|8545.28|0.06|0.03|R|F|1992-10-07|1992-08-01|1992-10-16|NONE|MAIL|l deposits. blithely regular pint|
+868|29|8|2|13|12077.26|0.05|0.07|R|F|1992-07-25|1992-08-26|1992-08-04|NONE|AIR|gged instructi|
+868|68|5|3|19|18393.14|0.09|0.06|R|F|1992-06-22|1992-08-27|1992-07-04|COLLECT COD|SHIP|lyly ironic platelets wake. rut|
+868|122|1|4|43|43951.16|0.02|0.04|A|F|1992-07-02|1992-07-22|1992-07-21|COLLECT COD|SHIP|kly silent deposits wake dar|
+868|25|8|5|27|24975.54|0.04|0.01|R|F|1992-08-01|1992-08-25|1992-08-12|TAKE BACK RETURN|RAIL|oss the fluffily unusual pinto |
+868|125|6|6|19|19477.28|0.02|0.05|R|F|1992-09-20|1992-07-18|1992-10-04|NONE|FOB|ely even deposits lose blithe|
+869|63|2|1|27|26002.62|0.07|0.07|N|O|1997-01-30|1997-02-17|1997-02-26|TAKE BACK RETURN|TRUCK|uffily even excuses? slyly even deposits |
+869|47|4|2|36|34093.44|0.04|0.01|N|O|1997-05-03|1997-03-17|1997-05-24|NONE|RAIL|ong the furiously bold instructi|
+870|50|9|1|36|34201.80|0.04|0.07|A|F|1993-10-18|1993-09-16|1993-11-15|COLLECT COD|MAIL|fily. furiously final accounts are |
+870|186|7|2|5|5430.90|0.06|0.05|A|F|1993-08-13|1993-09-11|1993-08-24|COLLECT COD|FOB|e slyly excuses. ironi|
+871|97|8|1|48|47860.32|0.10|0.03|N|O|1996-02-25|1996-02-09|1996-03-18|NONE|AIR|coys dazzle slyly slow notornis. f|
+871|55|10|2|47|44887.35|0.07|0.03|N|O|1995-12-25|1996-02-01|1996-01-24|TAKE BACK RETURN|RAIL|ss, final dep|
+871|108|5|3|13|13105.30|0.09|0.01|N|O|1996-01-25|1996-01-24|1996-02-03|NONE|REG AIR| haggle furiou|
+871|190|1|4|29|31615.51|0.06|0.07|N|O|1995-11-16|1996-01-27|1995-12-16|DELIVER IN PERSON|RAIL|ests are carefu|
+871|128|7|5|8|8224.96|0.00|0.01|N|O|1995-11-25|1996-01-12|1995-12-12|DELIVER IN PERSON|AIR|lar ideas-- slyly even accou|
+871|143|2|6|26|27121.64|0.00|0.06|N|O|1996-02-07|1996-01-05|1996-02-25|COLLECT COD|AIR|symptotes use quickly near the |
+871|174|3|7|4|4296.68|0.00|0.07|N|O|1996-03-09|1996-01-20|1996-03-26|COLLECT COD|FOB|l, regular dependencies w|
+896|39|10|1|47|44134.41|0.07|0.08|R|F|1993-05-28|1993-05-15|1993-06-15|DELIVER IN PERSON|TRUCK|ly even pinto beans integrate. b|
+896|198|2|2|10|10981.90|0.03|0.07|A|F|1993-07-07|1993-06-03|1993-07-24|COLLECT COD|SHIP| quickly even theodolites. carefully regu|
+896|2|9|3|7|6314.00|0.09|0.02|A|F|1993-05-02|1993-05-24|1993-05-31|DELIVER IN PERSON|MAIL| requests |
+896|152|3|4|11|11573.65|0.08|0.04|A|F|1993-05-19|1993-05-22|1993-06-08|COLLECT COD|MAIL|the multipliers sleep|
+896|188|9|5|34|36998.12|0.00|0.05|R|F|1993-05-21|1993-06-01|1993-05-23|NONE|TRUCK|ular, close requests cajo|
+896|177|6|6|44|47395.48|0.09|0.08|R|F|1993-05-19|1993-04-14|1993-06-02|DELIVER IN PERSON|FOB|lar, pending packages. deposits are q|
+896|109|2|7|11|11100.10|0.01|0.07|A|F|1993-05-01|1993-04-09|1993-05-06|TAKE BACK RETURN|FOB|rding to the pinto beans wa|
+897|91|4|1|15|14866.35|0.07|0.04|R|F|1995-05-25|1995-05-09|1995-06-07|COLLECT COD|REG AIR|r ideas. slyly spec|
+897|184|5|2|26|28188.68|0.05|0.08|N|O|1995-07-01|1995-06-10|1995-07-14|COLLECT COD|MAIL|tions sleep according to the special|
+897|126|1|3|13|13339.56|0.07|0.00|A|F|1995-03-30|1995-05-17|1995-04-21|TAKE BACK RETURN|MAIL|bold accounts mold carefully! braids|
+897|102|7|4|2|2004.20|0.08|0.08|R|F|1995-05-22|1995-05-07|1995-06-16|COLLECT COD|RAIL|into beans. slyly special fox|
+898|161|2|1|9|9550.44|0.07|0.08|A|F|1993-07-04|1993-07-09|1993-07-25|NONE|AIR|e slyly across the blithe|
+898|179|7|2|37|39929.29|0.03|0.05|A|F|1993-08-17|1993-08-04|1993-09-01|DELIVER IN PERSON|REG AIR|packages sleep furiously|
+898|49|8|3|11|10439.44|0.01|0.00|A|F|1993-09-13|1993-08-31|1993-09-25|TAKE BACK RETURN|MAIL|etly bold accounts |
+898|193|6|4|36|39354.84|0.04|0.07|R|F|1993-08-04|1993-07-25|1993-08-23|DELIVER IN PERSON|REG AIR| after the carefully |
+899|61|10|1|18|17299.08|0.04|0.05|N|O|1998-08-06|1998-05-09|1998-09-05|DELIVER IN PERSON|AIR|re daring, pending deposits. blit|
+899|47|4|2|25|23676.00|0.00|0.07|N|O|1998-07-21|1998-05-12|1998-08-16|NONE|REG AIR|rly final sentiments. bold pinto beans |
+899|85|6|3|4|3940.32|0.09|0.05|N|O|1998-06-02|1998-06-28|1998-06-14|TAKE BACK RETURN|REG AIR|ter the carefully regular deposits are agai|
+899|180|9|4|14|15122.52|0.05|0.03|N|O|1998-05-21|1998-05-28|1998-06-03|TAKE BACK RETURN|FOB|ades impress carefully|
+899|71|10|5|4|3884.28|0.06|0.02|N|O|1998-04-11|1998-05-14|1998-04-27|NONE|TRUCK|ges. blithe, ironic waters cajole care|
+899|120|4|6|47|47945.64|0.00|0.04|N|O|1998-04-14|1998-05-30|1998-05-13|DELIVER IN PERSON|TRUCK|furiously final foxes after the s|
+899|14|1|7|11|10054.11|0.02|0.08|N|O|1998-06-03|1998-06-15|1998-06-20|COLLECT COD|REG AIR|t the ironic|
+900|199|1|1|44|48364.36|0.01|0.06|R|F|1994-12-15|1994-12-03|1994-12-27|COLLECT COD|MAIL| detect quick|
+900|115|6|2|48|48725.28|0.08|0.04|A|F|1994-12-22|1994-11-08|1995-01-19|COLLECT COD|TRUCK|cial pinto beans nag |
+900|75|6|3|24|23401.68|0.03|0.00|R|F|1994-10-21|1994-12-25|1994-10-22|TAKE BACK RETURN|SHIP|-ray furiously un|
+901|22|7|1|36|33192.72|0.01|0.01|N|O|1998-08-11|1998-10-09|1998-08-27|DELIVER IN PERSON|REG AIR|. accounts are care|
+901|46|7|2|2|1892.08|0.09|0.02|N|O|1998-10-25|1998-09-27|1998-11-01|DELIVER IN PERSON|AIR|d foxes use slyly|
+901|43|10|3|37|34892.48|0.04|0.08|N|O|1998-11-01|1998-09-13|1998-11-05|NONE|AIR|ickly final deposits |
+901|18|9|4|11|10098.11|0.00|0.06|N|O|1998-11-13|1998-10-19|1998-11-14|TAKE BACK RETURN|TRUCK|ourts among the quickly expre|
+902|111|2|1|3|3033.33|0.06|0.00|R|F|1994-10-01|1994-10-25|1994-10-28|COLLECT COD|MAIL|into beans thrash blithely about the flu|
+902|118|2|2|8|8144.88|0.06|0.07|R|F|1994-10-25|1994-09-20|1994-11-07|COLLECT COD|RAIL| orbits al|
+902|165|2|3|24|25563.84|0.02|0.05|R|F|1994-11-08|1994-10-12|1994-11-26|NONE|FOB|. blithely even accounts poach furiously i|
+903|65|10|1|27|26056.62|0.04|0.03|N|O|1995-09-18|1995-09-20|1995-10-02|TAKE BACK RETURN|SHIP|lly pending foxes. furiously|
+903|9|2|2|35|31815.00|0.06|0.05|N|O|1995-09-18|1995-08-21|1995-10-12|TAKE BACK RETURN|TRUCK|rets wake fin|
+903|9|2|3|33|29997.00|0.02|0.03|N|O|1995-09-24|1995-09-01|1995-10-12|COLLECT COD|MAIL|ely ironic packages wake blithely|
+903|56|1|4|9|8604.45|0.09|0.00|N|O|1995-10-06|1995-09-14|1995-10-24|NONE|TRUCK|he slyly ev|
+903|42|3|5|1|942.04|0.04|0.00|N|O|1995-10-22|1995-09-13|1995-11-03|NONE|AIR|y final platelets sublate among the |
+903|168|9|6|13|13886.08|0.07|0.02|N|O|1995-09-11|1995-10-04|1995-10-03|COLLECT COD|SHIP|sleep along the final|
+928|169|10|1|29|31005.64|0.07|0.02|R|F|1995-05-17|1995-05-12|1995-05-21|NONE|REG AIR|ly alongside of the s|
+928|48|7|2|24|22752.96|0.05|0.08|A|F|1995-04-06|1995-05-08|1995-04-24|DELIVER IN PERSON|AIR|s the furiously regular warthogs im|
+928|152|10|3|46|48398.90|0.08|0.00|A|F|1995-05-09|1995-04-09|1995-06-01|DELIVER IN PERSON|REG AIR| beans sleep against the carefully ir|
+928|52|4|4|43|40938.15|0.10|0.05|A|F|1995-04-14|1995-04-21|1995-05-09|NONE|REG AIR|blithely. express, silent requests doze at|
+928|12|3|5|38|34656.38|0.02|0.08|N|F|1995-06-08|1995-04-15|1995-06-30|TAKE BACK RETURN|SHIP|xpress grouc|
+928|55|6|6|50|47752.50|0.05|0.00|N|F|1995-06-07|1995-04-15|1995-07-01|DELIVER IN PERSON|TRUCK| slyly slyly special request|
+928|11|5|7|11|10021.11|0.00|0.01|A|F|1995-04-29|1995-04-16|1995-04-30|NONE|AIR|longside of|
+929|129|8|1|45|46310.40|0.09|0.01|R|F|1993-01-24|1992-12-06|1993-02-16|DELIVER IN PERSON|REG AIR|ges haggle careful|
+929|175|5|2|44|47307.48|0.02|0.00|A|F|1992-10-09|1992-11-20|1992-10-22|DELIVER IN PERSON|SHIP|s. excuses cajole. carefully regu|
+929|74|5|3|14|13636.98|0.06|0.07|A|F|1992-10-21|1992-11-17|1992-11-15|NONE|FOB|gainst the|
+929|102|5|4|7|7014.70|0.06|0.01|A|F|1992-12-24|1992-12-19|1993-01-08|TAKE BACK RETURN|TRUCK|ithely. slyly c|
+930|45|4|1|36|34021.44|0.10|0.04|R|F|1994-12-21|1995-02-20|1994-12-24|COLLECT COD|RAIL|quickly regular pinto beans sle|
+930|18|8|2|47|43146.47|0.08|0.00|A|F|1995-03-20|1995-02-04|1995-04-04|DELIVER IN PERSON|AIR|ackages. fluffily e|
+930|65|10|3|10|9650.60|0.07|0.08|A|F|1994-12-18|1995-01-27|1995-01-16|COLLECT COD|AIR|ckly regular requests: regular instructions|
+930|100|2|4|21|21002.10|0.06|0.02|A|F|1995-02-16|1995-03-03|1995-03-13|DELIVER IN PERSON|SHIP|foxes. regular deposits integrate carefu|
+930|164|9|5|50|53208.00|0.03|0.06|A|F|1995-04-03|1995-01-29|1995-04-22|COLLECT COD|MAIL| excuses among the furiously express ideas |
+930|145|4|6|10|10451.40|0.00|0.04|A|F|1995-02-09|1995-02-17|1995-02-16|NONE|SHIP|blithely bold i|
+930|167|4|7|30|32014.80|0.07|0.08|R|F|1995-01-20|1995-02-28|1995-02-04|TAKE BACK RETURN|RAIL|g accounts sleep along the platelets.|
+931|40|1|1|18|16920.72|0.00|0.05|A|F|1993-04-04|1993-01-11|1993-04-13|NONE|RAIL|slyly ironic re|
+931|17|7|2|10|9170.10|0.05|0.07|A|F|1993-03-01|1993-01-09|1993-03-07|TAKE BACK RETURN|SHIP|ajole quickly. slyly sil|
+931|147|6|3|48|50262.72|0.01|0.08|A|F|1993-02-03|1993-03-02|1993-02-09|TAKE BACK RETURN|FOB|ep alongside of the fluffy |
+931|82|3|4|38|37319.04|0.08|0.08|A|F|1993-03-06|1993-02-24|1993-03-27|DELIVER IN PERSON|RAIL|usly final packages integrate carefully|
+932|44|1|1|41|38705.64|0.01|0.05|N|O|1997-06-05|1997-07-22|1997-06-26|COLLECT COD|RAIL|foxes. ironic pl|
+933|49|8|1|23|21827.92|0.02|0.04|R|F|1992-08-13|1992-09-18|1992-08-25|DELIVER IN PERSON|MAIL| the furiously bold dinos. sly|
+933|13|4|2|27|24651.27|0.02|0.01|R|F|1992-10-03|1992-10-02|1992-10-26|DELIVER IN PERSON|RAIL|ests. express|
+933|100|2|3|26|26002.60|0.05|0.00|A|F|1992-11-09|1992-11-03|1992-11-16|DELIVER IN PERSON|AIR| the deposits affix slyly after t|
+934|118|5|1|18|18325.98|0.07|0.01|N|O|1996-09-10|1996-09-20|1996-09-25|COLLECT COD|RAIL|y unusual requests dazzle above t|
+935|28|3|1|23|21344.46|0.05|0.00|N|O|1997-11-11|1997-11-22|1997-11-29|COLLECT COD|REG AIR|ular accounts about|
+935|65|10|2|23|22196.38|0.02|0.01|N|O|1998-01-11|1997-11-25|1998-02-05|COLLECT COD|TRUCK|hes haggle furiously dolphins. qu|
+935|135|1|3|36|37264.68|0.06|0.00|N|O|1997-11-05|1997-12-05|1997-11-25|TAKE BACK RETURN|AIR|leep about the exp|
+935|58|3|4|13|12454.65|0.08|0.04|N|O|1998-01-13|1997-11-30|1998-02-08|DELIVER IN PERSON|TRUCK|ld platelet|
+935|13|7|5|8|7304.08|0.02|0.05|N|O|1998-01-12|1997-11-02|1998-02-05|NONE|TRUCK|cept the quickly regular p|
+935|59|1|6|1|959.05|0.01|0.08|N|O|1997-12-14|1997-11-22|1998-01-08|DELIVER IN PERSON|TRUCK| instructions. ironic acc|
+960|107|10|1|1|1007.10|0.07|0.00|A|F|1994-12-24|1994-10-26|1995-01-20|DELIVER IN PERSON|AIR|y ironic packages. quickly even |
+960|117|7|2|25|25427.75|0.06|0.08|R|F|1994-12-01|1994-10-29|1994-12-27|DELIVER IN PERSON|RAIL|ts. fluffily regular requests |
+960|175|3|3|32|34405.44|0.01|0.08|R|F|1995-01-19|1994-12-17|1995-02-04|DELIVER IN PERSON|FOB|around the blithe, even pl|
+961|118|5|1|7|7126.77|0.10|0.00|N|O|1995-07-23|1995-07-20|1995-08-11|TAKE BACK RETURN|RAIL|usual dolphins. ironic pearls sleep blit|
+961|91|2|2|18|17839.62|0.09|0.05|N|O|1995-07-01|1995-08-14|1995-07-04|DELIVER IN PERSON|AIR|rmanent foxes haggle speci|
+961|97|8|3|42|41877.78|0.06|0.01|N|O|1995-08-24|1995-08-21|1995-09-10|TAKE BACK RETURN|SHIP|ests do cajole blithely. furiously bo|
+961|34|10|4|29|27086.87|0.00|0.07|N|F|1995-06-10|1995-08-20|1995-06-26|TAKE BACK RETURN|TRUCK|l accounts use blithely against the|
+961|26|7|5|38|35188.76|0.03|0.05|N|O|1995-08-21|1995-07-19|1995-08-27|NONE|RAIL|he blithely special requests. furiousl|
+961|197|8|6|30|32915.70|0.09|0.03|N|O|1995-07-06|1995-07-20|1995-07-26|DELIVER IN PERSON|MAIL|warhorses slee|
+962|57|8|1|36|34453.80|0.01|0.03|R|F|1994-08-09|1994-07-10|1994-09-02|COLLECT COD|TRUCK|al foxes. iron|
+962|36|2|2|27|25272.81|0.09|0.02|A|F|1994-05-11|1994-07-10|1994-06-03|TAKE BACK RETURN|SHIP|y slyly express deposits. final i|
+962|80|1|3|3|2940.24|0.07|0.08|A|F|1994-05-08|1994-07-06|1994-06-02|DELIVER IN PERSON|FOB|ag furiously. even pa|
+962|57|5|4|20|19141.00|0.04|0.02|R|F|1994-08-26|1994-06-27|1994-09-11|DELIVER IN PERSON|SHIP| deposits use fluffily according to |
+962|152|7|5|12|12625.80|0.02|0.00|A|F|1994-06-09|1994-06-07|1994-06-11|COLLECT COD|TRUCK|across the furiously regular escapades daz|
+962|188|9|6|5|5440.90|0.02|0.05|A|F|1994-08-29|1994-07-15|1994-09-19|COLLECT COD|TRUCK|efully bold packages run slyly caref|
+963|194|8|1|7|7659.33|0.01|0.00|R|F|1994-09-12|1994-07-18|1994-09-17|DELIVER IN PERSON|REG AIR|s. slyly regular depe|
+963|98|10|2|48|47908.32|0.10|0.06|R|F|1994-08-25|1994-08-12|1994-09-21|DELIVER IN PERSON|RAIL|ages. quickly express deposits cajole pe|
+964|199|10|1|39|42868.41|0.04|0.01|N|O|1995-06-21|1995-07-24|1995-06-24|NONE|AIR|se furiously regular instructions. blith|
+964|113|4|2|1|1013.11|0.02|0.05|N|O|1995-08-20|1995-07-29|1995-09-10|DELIVER IN PERSON|REG AIR|unts. quickly even platelets s|
+964|57|5|3|49|46895.45|0.01|0.03|N|O|1995-09-06|1995-08-10|1995-10-05|NONE|MAIL|ounts. blithely regular packag|
+964|55|3|4|44|42022.20|0.05|0.02|N|O|1995-09-18|1995-08-02|1995-10-17|TAKE BACK RETURN|TRUCK|ronic deposit|
+965|108|1|1|20|20162.00|0.04|0.05|N|F|1995-06-16|1995-07-20|1995-07-06|COLLECT COD|MAIL|kly. carefully pending requ|
+965|18|5|2|23|21114.23|0.06|0.08|N|O|1995-07-12|1995-07-08|1995-08-11|COLLECT COD|MAIL|ld kindle carefully across th|
+966|180|8|1|19|20523.42|0.07|0.01|N|O|1998-05-26|1998-07-15|1998-05-29|COLLECT COD|FOB|efully final pinto beans. quickly |
+966|117|4|2|42|42718.62|0.02|0.06|N|O|1998-06-28|1998-06-20|1998-07-05|NONE|TRUCK|tions boost furiously car|
+966|22|1|3|42|38724.84|0.06|0.08|N|O|1998-06-15|1998-06-08|1998-07-05|NONE|RAIL|sly ironic asymptotes hagg|
+966|5|2|4|20|18100.00|0.04|0.01|N|O|1998-07-19|1998-07-15|1998-07-27|NONE|TRUCK|pecial ins|
+967|59|4|1|41|39321.05|0.05|0.05|R|F|1992-09-21|1992-08-15|1992-10-21|NONE|MAIL|ld foxes wake closely special|
+967|85|6|2|4|3940.32|0.01|0.02|A|F|1992-07-15|1992-07-27|1992-07-18|DELIVER IN PERSON|REG AIR|platelets hang carefully along |
+967|132|8|3|10|10321.30|0.00|0.02|A|F|1992-09-18|1992-08-06|1992-09-19|DELIVER IN PERSON|MAIL|old pinto beans alongside of the exp|
+967|148|7|4|49|51358.86|0.01|0.04|A|F|1992-09-28|1992-09-15|1992-10-14|NONE|SHIP|the slyly even ideas. carefully even|
+967|17|1|5|41|37597.41|0.08|0.04|A|F|1992-07-23|1992-08-07|1992-08-13|TAKE BACK RETURN|FOB|efully special ide|
+967|106|9|6|17|17103.70|0.05|0.06|A|F|1992-10-02|1992-08-19|1992-10-25|NONE|MAIL|y ironic foxes caj|
+967|161|8|7|18|19100.88|0.00|0.02|A|F|1992-10-06|1992-08-05|1992-10-15|DELIVER IN PERSON|RAIL|ngage blith|
+992|60|2|1|14|13440.84|0.10|0.03|N|O|1998-01-29|1997-12-29|1998-02-18|TAKE BACK RETURN|MAIL|the unusual, even dependencies affix fluff|
+992|38|9|2|34|31893.02|0.02|0.00|N|O|1997-11-29|1998-01-21|1997-11-30|NONE|RAIL|s use silently. blithely regular ideas b|
+992|105|6|3|30|30153.00|0.10|0.00|N|O|1997-12-15|1998-02-02|1998-01-12|NONE|SHIP|nic instructions n|
+992|48|5|4|21|19908.84|0.06|0.06|N|O|1997-11-13|1997-12-28|1997-12-10|NONE|TRUCK|fily. quickly special deposit|
+992|92|4|5|7|6944.63|0.09|0.05|N|O|1997-11-30|1997-12-24|1997-12-16|DELIVER IN PERSON|TRUCK|ideas haggle. special theodolit|
+992|75|3|6|41|39977.87|0.10|0.01|N|O|1997-11-14|1998-02-04|1997-11-23|TAKE BACK RETURN|AIR|eodolites cajole across the accounts.|
+993|175|5|1|33|35480.61|0.01|0.05|N|O|1996-01-03|1995-11-28|1996-01-23|DELIVER IN PERSON|AIR| the deposits affix agains|
+993|3|6|2|28|25284.00|0.06|0.08|N|O|1995-10-24|1995-11-20|1995-11-06|DELIVER IN PERSON|RAIL|lites. even theodolite|
+993|40|1|3|10|9400.40|0.03|0.08|N|O|1995-12-17|1995-11-13|1995-12-20|NONE|RAIL|encies wake fur|
+993|191|4|4|40|43647.60|0.01|0.01|N|O|1995-11-16|1995-11-01|1995-12-05|TAKE BACK RETURN|RAIL|gle above the furiously |
+993|146|7|5|33|34522.62|0.09|0.08|N|O|1995-09-28|1995-10-24|1995-10-03|COLLECT COD|RAIL|fluffily. quiet excuses sleep furiously sly|
+993|137|3|6|35|36299.55|0.04|0.02|N|O|1995-10-26|1995-10-20|1995-11-05|DELIVER IN PERSON|FOB|es. ironic, ironic requests|
+993|5|2|7|15|13575.00|0.09|0.03|N|O|1995-09-27|1995-10-21|1995-10-17|DELIVER IN PERSON|MAIL|sits. pending pinto beans haggle? ca|
+994|65|6|1|4|3860.24|0.07|0.03|R|F|1994-07-05|1994-05-21|1994-07-20|COLLECT COD|SHIP|aggle carefully acc|
+994|10|3|2|11|10010.11|0.01|0.00|R|F|1994-05-03|1994-06-10|1994-05-22|NONE|AIR|ular accounts sleep |
+994|31|7|3|5|4655.15|0.08|0.08|A|F|1994-06-24|1994-06-14|1994-06-26|NONE|MAIL|ainst the pending requests. packages sl|
+994|131|7|4|25|25778.25|0.10|0.00|A|F|1994-06-03|1994-06-02|1994-06-06|COLLECT COD|RAIL|usual pinto beans.|
+995|173|1|1|15|16097.55|0.08|0.05|N|O|1995-06-30|1995-08-04|1995-07-27|NONE|REG AIR|uses. fluffily fina|
+995|129|4|2|28|28815.36|0.08|0.03|N|F|1995-06-12|1995-07-20|1995-06-19|DELIVER IN PERSON|SHIP|pades. quick, final frays use flu|
+995|166|3|3|45|47977.20|0.00|0.05|N|O|1995-08-02|1995-07-21|1995-08-03|DELIVER IN PERSON|SHIP|lar packages detect blithely above t|
+995|66|3|4|25|24151.50|0.01|0.08|N|O|1995-09-08|1995-08-05|1995-09-28|NONE|TRUCK|lyly even |
+995|24|5|5|18|16632.36|0.06|0.03|N|O|1995-07-03|1995-07-29|1995-07-22|TAKE BACK RETURN|AIR| even accounts unwind c|
+996|173|2|1|43|46146.31|0.03|0.06|N|O|1998-03-27|1998-03-25|1998-04-06|COLLECT COD|SHIP| the blithely ironic foxes. slyly silent d|
+997|163|4|1|11|11694.76|0.00|0.02|N|O|1997-06-16|1997-07-21|1997-07-14|DELIVER IN PERSON|TRUCK|p furiously according to t|
+997|48|9|2|17|16116.68|0.03|0.00|N|O|1997-07-28|1997-07-26|1997-08-20|DELIVER IN PERSON|SHIP|aggle quickly furiously|
+998|10|7|1|22|20020.22|0.04|0.05|A|F|1994-12-03|1995-02-17|1994-12-19|TAKE BACK RETURN|RAIL|lites. qui|
+998|181|2|2|7|7568.26|0.10|0.05|R|F|1995-03-24|1995-01-18|1995-04-03|NONE|MAIL|nic deposits. even asym|
+998|142|9|3|30|31264.20|0.05|0.07|A|F|1994-12-02|1995-01-23|1994-12-23|NONE|SHIP|lyly idle Tir|
+998|11|8|4|6|5466.06|0.09|0.05|R|F|1995-03-20|1994-12-27|1995-04-13|DELIVER IN PERSON|MAIL|refully accounts. carefully express ac|
+998|73|2|5|1|973.07|0.04|0.00|R|F|1995-01-05|1995-01-06|1995-01-13|NONE|SHIP|es sleep. regular dependencies use bl|
+999|61|6|1|34|32676.04|0.00|0.08|R|F|1993-10-30|1993-10-17|1993-10-31|TAKE BACK RETURN|SHIP|its. daringly final instruc|
+999|199|1|2|41|45066.79|0.08|0.01|A|F|1993-10-16|1993-12-04|1993-11-03|DELIVER IN PERSON|REG AIR|us depths. carefully ironic instruc|
+999|118|5|3|15|15271.65|0.07|0.06|A|F|1993-12-12|1993-10-18|1994-01-08|COLLECT COD|REG AIR|y ironic requests. carefully regu|
+999|3|4|4|10|9030.00|0.05|0.02|A|F|1993-11-23|1993-12-02|1993-11-29|NONE|MAIL|efully pending|
+999|19|10|5|3|2757.03|0.03|0.00|R|F|1993-09-17|1993-10-22|1993-10-13|NONE|FOB|nic, pending ideas. bl|
+999|181|2|6|37|40003.66|0.00|0.04|R|F|1994-01-03|1993-10-28|1994-01-12|DELIVER IN PERSON|TRUCK|ckly slyly unusual packages: packages hagg|
+1024|199|2|1|49|53860.31|0.03|0.05|N|O|1998-03-06|1998-01-26|1998-03-29|TAKE BACK RETURN|FOB|ts. asymptotes nag fur|
+1024|126|5|2|34|34888.08|0.00|0.01|N|O|1998-01-06|1998-02-05|1998-01-26|COLLECT COD|SHIP|des the slyly even|
+1024|44|3|3|28|26433.12|0.04|0.01|N|O|1998-03-04|1998-03-12|1998-03-15|TAKE BACK RETURN|TRUCK|e blithely regular pi|
+1024|184|5|4|13|14094.34|0.02|0.04|N|O|1998-04-11|1998-02-26|1998-04-18|NONE|FOB|e slyly around the slyly special instructi|
+1024|21|4|5|49|45129.98|0.02|0.04|N|O|1998-02-27|1998-03-10|1998-03-27|COLLECT COD|FOB| carefully bold |
+1025|150|1|1|36|37805.40|0.03|0.04|A|F|1995-05-15|1995-07-05|1995-06-10|COLLECT COD|FOB|e unusual, regular instr|
+1025|69|10|2|23|22288.38|0.08|0.03|N|F|1995-06-02|1995-07-29|1995-06-23|COLLECT COD|RAIL| regular platelets nag carefu|
+1025|23|2|3|25|23075.50|0.06|0.05|R|F|1995-05-29|1995-06-21|1995-06-13|DELIVER IN PERSON|REG AIR|xpress foxes. furiousl|
+1026|38|4|1|36|33769.08|0.10|0.02|N|O|1997-06-14|1997-07-20|1997-06-23|NONE|SHIP|st the ide|
+1026|37|8|2|6|5622.18|0.10|0.08|N|O|1997-07-07|1997-08-16|1997-07-14|TAKE BACK RETURN|TRUCK|to beans. special, regular packages hagg|
+1027|156|1|1|43|45414.45|0.07|0.08|R|F|1992-06-17|1992-08-28|1992-07-10|DELIVER IN PERSON|MAIL|oxes. carefully regular deposits|
+1027|113|10|2|20|20262.20|0.01|0.02|A|F|1992-06-08|1992-08-29|1992-06-14|NONE|TRUCK|ar excuses eat f|
+1027|126|9|3|2|2052.24|0.01|0.02|R|F|1992-08-28|1992-07-09|1992-09-10|NONE|FOB|s. quickly unusual waters inside |
+1027|100|4|4|13|13001.30|0.08|0.01|R|F|1992-08-22|1992-07-10|1992-09-12|DELIVER IN PERSON|RAIL|ily ironic ideas use|
+1027|136|2|5|22|22794.86|0.02|0.00|A|F|1992-09-03|1992-08-14|1992-10-01|DELIVER IN PERSON|FOB|the furiously express ex|
+1027|105|8|6|10|10051.00|0.06|0.08|R|F|1992-08-28|1992-08-06|1992-09-03|COLLECT COD|REG AIR|ilent, express foxes near the blithely sp|
+1028|128|3|1|2|2056.24|0.09|0.03|A|F|1994-01-10|1994-03-22|1994-01-26|COLLECT COD|FOB|s alongside of the regular asymptotes sleep|
+1028|112|9|2|39|39472.29|0.06|0.05|R|F|1994-02-18|1994-03-22|1994-03-06|TAKE BACK RETURN|MAIL| final dependencies affix a|
+1028|100|3|3|8|8000.80|0.03|0.07|A|F|1994-02-14|1994-03-28|1994-02-22|NONE|AIR|e carefully final packages. furiously fi|
+1028|32|8|4|26|24232.78|0.07|0.02|A|F|1994-03-18|1994-02-08|1994-03-19|TAKE BACK RETURN|RAIL|ronic platelets. carefully f|
+1028|29|2|5|27|25083.54|0.00|0.04|A|F|1994-04-03|1994-02-07|1994-04-26|NONE|REG AIR|ial accounts nag. slyly|
+1028|26|1|6|39|36114.78|0.03|0.02|A|F|1994-02-27|1994-02-16|1994-03-02|DELIVER IN PERSON|AIR|c theodoli|
+1028|31|2|7|22|20482.66|0.03|0.00|R|F|1994-04-24|1994-02-27|1994-05-08|NONE|REG AIR| Tiresias alongside of the carefully spec|
+1029|137|3|1|45|46670.85|0.05|0.07|R|F|1994-07-21|1994-08-30|1994-07-29|TAKE BACK RETURN|FOB|sits boost blithely|
+1030|65|10|1|17|16406.02|0.06|0.06|R|F|1994-10-13|1994-08-01|1994-11-10|DELIVER IN PERSON|RAIL|ly. carefully even packages dazz|
+1031|46|7|1|15|14190.60|0.10|0.08|A|F|1994-11-07|1994-10-29|1994-11-09|TAKE BACK RETURN|FOB|about the carefully bold a|
+1031|165|4|2|28|29824.48|0.05|0.01|A|F|1994-12-10|1994-10-29|1994-12-18|COLLECT COD|FOB|ly ironic accounts across the q|
+1031|187|8|3|27|29353.86|0.07|0.02|R|F|1994-09-20|1994-10-18|1994-10-10|DELIVER IN PERSON|SHIP|gular deposits cajole. blithely unus|
+1031|88|9|4|7|6916.56|0.03|0.03|R|F|1994-12-07|1994-11-11|1994-12-30|COLLECT COD|FOB|r instructions. car|
+1031|191|5|5|44|48012.36|0.01|0.07|R|F|1994-11-20|1994-11-24|1994-12-11|NONE|AIR|re slyly above the furio|
+1056|121|6|1|37|37781.44|0.04|0.06|R|F|1995-02-18|1995-04-01|1995-03-20|NONE|TRUCK| special packages. qui|
+1057|193|5|1|29|31702.51|0.10|0.01|A|F|1992-05-05|1992-05-05|1992-06-03|TAKE BACK RETURN|SHIP|es wake according to the q|
+1057|169|8|2|11|11760.76|0.00|0.02|R|F|1992-03-31|1992-04-18|1992-04-18|COLLECT COD|AIR|yly final theodolites. furi|
+1057|85|6|3|21|20686.68|0.03|0.04|A|F|1992-02-28|1992-05-01|1992-03-10|NONE|REG AIR|ar orbits boost bli|
+1057|182|3|4|20|21643.60|0.06|0.03|R|F|1992-03-02|1992-05-19|1992-03-13|DELIVER IN PERSON|TRUCK|s wake bol|
+1057|97|1|5|7|6979.63|0.06|0.05|R|F|1992-06-05|1992-04-30|1992-06-20|NONE|TRUCK|y slyly express theodolites. slyly bo|
+1057|52|7|6|19|18088.95|0.04|0.07|A|F|1992-05-31|1992-05-09|1992-06-02|DELIVER IN PERSON|FOB|r-- packages haggle alon|
+1058|140|6|1|24|24963.36|0.08|0.04|A|F|1993-07-09|1993-05-28|1993-07-22|DELIVER IN PERSON|TRUCK|fully ironic accounts. express accou|
+1058|89|10|2|5|4945.40|0.04|0.07|R|F|1993-05-11|1993-05-29|1993-05-27|COLLECT COD|TRUCK|refully even requests boost along|
+1058|90|1|3|44|43563.96|0.10|0.01|R|F|1993-06-26|1993-06-21|1993-07-20|COLLECT COD|TRUCK|uriously f|
+1058|5|2|4|25|22625.00|0.09|0.01|A|F|1993-05-27|1993-06-10|1993-06-20|TAKE BACK RETURN|MAIL| the final requests believe carefully |
+1059|178|9|1|16|17250.72|0.07|0.02|A|F|1994-04-24|1994-03-31|1994-04-28|DELIVER IN PERSON|SHIP|y ironic pinto |
+1059|29|2|2|7|6503.14|0.07|0.06|R|F|1994-03-30|1994-04-01|1994-04-24|DELIVER IN PERSON|MAIL|the furiously silent excuses are e|
+1059|88|9|3|45|44463.60|0.00|0.02|R|F|1994-06-10|1994-05-08|1994-06-21|COLLECT COD|RAIL|riously even theodolites. slyly regula|
+1059|110|7|4|26|26262.86|0.09|0.01|A|F|1994-03-17|1994-04-18|1994-03-26|DELIVER IN PERSON|TRUCK|ar pinto beans at the furiously |
+1059|139|5|5|37|38447.81|0.09|0.04|R|F|1994-03-31|1994-05-08|1994-04-06|COLLECT COD|RAIL| packages lose in place of the slyly unusu|
+1059|190|1|6|50|54509.50|0.00|0.03|A|F|1994-06-15|1994-05-11|1994-06-29|NONE|MAIL|s impress furiously about|
+1059|123|4|7|13|13300.56|0.01|0.03|R|F|1994-06-12|1994-05-11|1994-07-02|COLLECT COD|TRUCK|usly regular theodo|
+1060|196|10|1|8|8769.52|0.07|0.04|R|F|1993-05-21|1993-05-06|1993-06-10|DELIVER IN PERSON|FOB|iously. furiously regular in|
+1060|8|5|2|26|23608.00|0.06|0.08|R|F|1993-04-12|1993-04-01|1993-04-20|DELIVER IN PERSON|TRUCK|counts; even deposits are carefull|
+1060|164|3|3|11|11705.76|0.01|0.07|A|F|1993-05-13|1993-05-08|1993-05-17|TAKE BACK RETURN|MAIL|e regular deposits: re|
+1060|110|7|4|16|16161.76|0.03|0.06|A|F|1993-06-15|1993-04-18|1993-07-05|COLLECT COD|SHIP|ccounts. foxes maintain care|
+1060|53|8|5|1|953.05|0.04|0.06|A|F|1993-06-19|1993-05-10|1993-06-21|COLLECT COD|RAIL|posits detect carefully abo|
+1060|72|2|6|26|25273.82|0.01|0.03|A|F|1993-02-28|1993-04-01|1993-03-09|TAKE BACK RETURN|FOB|quickly abo|
+1060|121|10|7|36|36760.32|0.09|0.01|R|F|1993-03-14|1993-03-24|1993-04-02|TAKE BACK RETURN|FOB|r the quickly|
+1061|151|6|1|7|7358.05|0.04|0.04|N|O|1998-08-09|1998-08-12|1998-08-16|COLLECT COD|FOB|es are slyly expr|
+1061|119|3|2|2|2038.22|0.06|0.02|N|O|1998-08-15|1998-08-05|1998-08-24|COLLECT COD|MAIL|. regular accounts impre|
+1061|111|8|3|26|26288.86|0.08|0.02|N|O|1998-06-18|1998-07-25|1998-06-24|TAKE BACK RETURN|AIR|ave to slee|
+1061|136|7|4|41|42481.33|0.00|0.05|N|O|1998-06-29|1998-07-02|1998-07-27|NONE|MAIL|s are. ironic theodolites cajole. dep|
+1061|131|2|5|50|51556.50|0.04|0.08|N|O|1998-05-25|1998-07-22|1998-06-22|COLLECT COD|AIR|nding excuses are around the e|
+1061|144|1|6|35|36544.90|0.09|0.05|N|O|1998-07-05|1998-07-07|1998-07-30|TAKE BACK RETURN|MAIL|ending requests nag careful|
+1062|137|8|1|38|39410.94|0.00|0.01|N|O|1997-01-27|1997-03-07|1997-02-16|DELIVER IN PERSON|TRUCK|deas. pending acc|
+1063|96|9|1|42|41835.78|0.03|0.02|A|F|1994-07-10|1994-05-25|1994-07-26|NONE|RAIL|tructions about the blithely ex|
+1088|107|8|1|30|30213.00|0.07|0.03|A|F|1992-05-22|1992-06-25|1992-06-11|TAKE BACK RETURN|SHIP|long the packages snooze careful|
+1088|37|3|2|11|10307.33|0.06|0.00|A|F|1992-08-30|1992-07-25|1992-09-10|TAKE BACK RETURN|AIR|inal requests. fluffily express theod|
+1088|181|2|3|5|5405.90|0.03|0.07|A|F|1992-07-01|1992-07-25|1992-07-02|NONE|AIR|refully ironic packages. r|
+1088|124|5|4|3|3072.36|0.09|0.03|A|F|1992-06-15|1992-08-02|1992-06-18|DELIVER IN PERSON|MAIL|pecial theodolites |
+1089|151|3|1|47|49404.05|0.05|0.06|N|O|1996-06-26|1996-06-25|1996-07-11|NONE|TRUCK|aggle furiously among the bravely eve|
+1089|50|7|2|35|33251.75|0.03|0.00|N|O|1996-08-14|1996-07-10|1996-08-26|NONE|TRUCK|ly express deposits haggle|
+1089|26|7|3|23|21298.46|0.10|0.05|N|O|1996-06-24|1996-07-25|1996-07-20|DELIVER IN PERSON|AIR|g dolphins. deposits integrate. s|
+1089|141|10|4|1|1041.14|0.01|0.03|N|O|1996-07-08|1996-07-07|1996-07-17|COLLECT COD|RAIL|n courts among the caref|
+1090|22|3|1|5|4610.10|0.02|0.05|N|O|1998-02-19|1997-12-25|1998-02-24|DELIVER IN PERSON|AIR|s above the |
+1090|113|10|2|28|28367.08|0.08|0.08|N|O|1998-02-20|1998-01-03|1998-03-19|NONE|FOB|s cajole above the regular|
+1091|38|9|1|40|37521.20|0.10|0.06|N|O|1996-12-17|1996-10-14|1996-12-24|TAKE BACK RETURN|REG AIR|platelets. regular packag|
+1092|184|5|1|48|52040.64|0.04|0.04|N|O|1995-06-25|1995-04-06|1995-07-18|DELIVER IN PERSON|AIR|unusual accounts. fluffi|
+1092|153|5|2|1|1053.15|0.01|0.06|A|F|1995-03-10|1995-04-21|1995-04-06|COLLECT COD|RAIL|lent, pending requests-- requests nag accor|
+1092|161|8|3|28|29712.48|0.05|0.08|R|F|1995-04-08|1995-05-01|1995-05-02|DELIVER IN PERSON|FOB|affix carefully. u|
+1092|86|7|4|2|1972.16|0.05|0.07|R|F|1995-04-09|1995-05-12|1995-05-03|TAKE BACK RETURN|TRUCK|ans. slyly eve|
+1093|87|8|1|7|6909.56|0.04|0.02|N|O|1997-11-24|1997-09-23|1997-11-25|TAKE BACK RETURN|SHIP|bold deposits. blithely ironic depos|
+1093|177|5|2|37|39855.29|0.08|0.04|N|O|1997-11-06|1997-10-08|1997-11-22|COLLECT COD|FOB|le furiously across the carefully sp|
+1093|61|2|3|34|32676.04|0.01|0.06|N|O|1997-11-07|1997-09-06|1997-11-28|TAKE BACK RETURN|REG AIR|sits. express accounts play carefully. bol|
+1094|115|6|1|9|9135.99|0.07|0.06|N|O|1997-12-28|1998-03-16|1998-01-18|DELIVER IN PERSON|AIR|as. slyly pe|
+1095|137|3|1|33|34225.29|0.01|0.02|N|O|1995-10-03|1995-09-22|1995-10-13|NONE|MAIL|slyly around the iron|
+1095|136|2|2|24|24867.12|0.04|0.06|N|O|1995-08-24|1995-10-20|1995-09-09|COLLECT COD|TRUCK|packages nod furiously above the carefully |
+1095|156|4|3|13|13729.95|0.06|0.01|N|O|1995-08-24|1995-10-19|1995-09-02|TAKE BACK RETURN|REG AIR|ously even accounts. slyly bold a|
+1095|135|1|4|28|28983.64|0.08|0.03|N|O|1995-09-20|1995-11-18|1995-10-02|DELIVER IN PERSON|SHIP| regular pac|
+1095|112|2|5|40|40484.40|0.09|0.03|N|O|1995-10-18|1995-11-14|1995-11-09|NONE|MAIL| bold accounts haggle slyly furiously even|
+1095|181|2|6|37|40003.66|0.07|0.08|N|O|1995-10-04|1995-11-13|1995-10-12|NONE|SHIP|. quickly even dolphins sle|
+1120|178|8|1|10|10781.70|0.08|0.05|N|O|1997-12-17|1998-01-21|1997-12-23|DELIVER IN PERSON|MAIL|dependencies. blithel|
+1120|20|1|2|49|45080.98|0.01|0.07|N|O|1998-01-03|1998-02-02|1998-01-09|TAKE BACK RETURN|RAIL|heodolites. quick re|
+1120|76|6|3|21|20497.47|0.06|0.01|N|O|1998-01-11|1998-02-04|1998-01-19|COLLECT COD|REG AIR|s: fluffily even packages c|
+1120|46|9|4|22|20812.88|0.09|0.08|N|O|1997-11-15|1998-01-25|1997-12-07|TAKE BACK RETURN|REG AIR|ons. slyly silent requests sleep silent|
+1120|83|4|5|10|9830.80|0.07|0.08|N|O|1997-11-10|1998-02-01|1997-11-28|TAKE BACK RETURN|AIR|ages haggle furiously |
+1121|168|3|1|42|44862.72|0.04|0.05|N|O|1997-03-05|1997-03-18|1997-03-14|DELIVER IN PERSON|SHIP|nts are slyly special packages. f|
+1121|161|10|2|27|28651.32|0.08|0.00|N|O|1997-05-08|1997-03-28|1997-05-14|NONE|MAIL|ly ironic accounts cajole slyly abou|
+1121|157|5|3|10|10571.50|0.00|0.04|N|O|1997-04-17|1997-03-18|1997-05-02|TAKE BACK RETURN|RAIL|dencies. quickly regular theodolites n|
+1121|166|1|4|29|30918.64|0.02|0.01|N|O|1997-03-07|1997-04-02|1997-04-01|DELIVER IN PERSON|REG AIR| use furiously. quickly silent package|
+1121|30|9|5|47|43711.41|0.09|0.03|N|O|1997-04-27|1997-03-28|1997-05-14|COLLECT COD|FOB|ly idle, i|
+1121|200|1|6|50|55010.00|0.06|0.03|N|O|1997-04-21|1997-02-16|1997-04-25|NONE|TRUCK|odolites. slyly even accounts|
+1121|80|8|7|37|36262.96|0.06|0.01|N|O|1997-02-27|1997-03-04|1997-03-02|COLLECT COD|RAIL|special packages. fluffily final requests s|
+1122|92|6|1|8|7936.72|0.10|0.06|N|O|1997-02-02|1997-04-03|1997-02-22|TAKE BACK RETURN|RAIL|c foxes are along the slyly r|
+1122|182|3|2|29|31383.22|0.05|0.04|N|O|1997-05-07|1997-04-07|1997-05-15|COLLECT COD|SHIP|ptotes. quickl|
+1122|147|6|3|25|26178.50|0.09|0.01|N|O|1997-03-21|1997-03-03|1997-04-07|TAKE BACK RETURN|RAIL|d furiously. pinto |
+1122|106|9|4|40|40244.00|0.08|0.08|N|O|1997-02-07|1997-03-25|1997-02-25|NONE|REG AIR|packages sleep after the asym|
+1122|151|2|5|15|15767.25|0.05|0.03|N|O|1997-04-15|1997-03-15|1997-05-07|COLLECT COD|SHIP|olve blithely regular, |
+1122|162|7|6|24|25491.84|0.04|0.01|N|O|1997-03-08|1997-02-20|1997-04-05|NONE|RAIL|blithely requests. slyly pending r|
+1122|1|6|7|38|34238.00|0.00|0.08|N|O|1997-01-23|1997-04-02|1997-02-16|NONE|TRUCK|t theodolites sleep. even, ironic|
+1123|12|2|1|10|9120.10|0.05|0.08|N|O|1996-11-12|1996-10-04|1996-11-30|NONE|MAIL|ckages are above the depths. slyly ir|
+1123|178|8|2|39|42048.63|0.03|0.08|N|O|1996-08-25|1996-10-21|1996-09-04|DELIVER IN PERSON|REG AIR|rding to the furiously ironic requests: r|
+1123|101|4|3|38|38041.80|0.03|0.08|N|O|1996-09-23|1996-10-04|1996-09-27|DELIVER IN PERSON|FOB| blithely carefully unusual reques|
+1124|198|2|1|1|1098.19|0.09|0.08|N|O|1998-10-06|1998-10-02|1998-10-30|NONE|REG AIR| instructions cajole qu|
+1124|6|1|2|13|11778.00|0.05|0.04|N|O|1998-09-05|1998-10-03|1998-09-30|DELIVER IN PERSON|SHIP|t the slyly |
+1124|93|5|3|35|34758.15|0.10|0.05|N|O|1998-11-25|1998-10-08|1998-12-25|TAKE BACK RETURN|AIR|ut the slyly bold pinto beans; fi|
+1124|50|1|4|25|23751.25|0.08|0.05|N|O|1998-08-05|1998-10-14|1998-08-11|NONE|MAIL|ggle slyly according|
+1124|75|5|5|33|32177.31|0.05|0.04|N|O|1998-10-19|1998-09-17|1998-10-26|TAKE BACK RETURN|SHIP|eposits sleep slyly. stealthily f|
+1124|27|6|6|43|39861.86|0.01|0.03|N|O|1998-09-19|1998-10-28|1998-10-10|COLLECT COD|MAIL|across the |
+1124|95|6|7|1|995.09|0.09|0.01|N|O|1998-10-07|1998-08-31|1998-10-12|NONE|TRUCK|ly bold accou|
+1125|133|4|1|4|4132.52|0.08|0.02|A|F|1994-12-10|1994-12-28|1994-12-30|NONE|MAIL| quickly express packages a|
+1125|138|9|2|24|24915.12|0.10|0.03|R|F|1995-01-31|1994-12-02|1995-02-20|COLLECT COD|AIR|es about the slyly s|
+1125|122|7|3|26|26575.12|0.05|0.04|A|F|1995-02-24|1995-01-18|1995-03-05|COLLECT COD|TRUCK|l instruction|
+1125|98|1|4|29|28944.61|0.06|0.00|A|F|1994-11-29|1994-12-20|1994-12-10|DELIVER IN PERSON|RAIL| platelets wake against the carefully i|
+1126|36|2|1|44|41185.32|0.08|0.03|N|O|1998-05-07|1998-04-02|1998-05-29|NONE|TRUCK|es. carefully special|
+1126|58|3|2|7|6706.35|0.06|0.01|N|O|1998-05-02|1998-03-22|1998-05-21|COLLECT COD|MAIL|ons. final, unusual|
+1126|147|10|3|14|14659.96|0.07|0.07|N|O|1998-04-17|1998-04-15|1998-05-12|DELIVER IN PERSON|TRUCK|nstructions. blithe|
+1127|43|10|1|35|33006.40|0.02|0.03|N|O|1995-11-25|1995-11-03|1995-12-17|NONE|TRUCK|l instructions boost blithely according |
+1127|110|5|2|38|38384.18|0.09|0.05|N|O|1995-11-07|1995-11-11|1995-11-26|DELIVER IN PERSON|RAIL|. never final packages boost acro|
+1127|20|1|3|29|26680.58|0.09|0.07|N|O|1995-09-20|1995-11-21|1995-10-11|DELIVER IN PERSON|REG AIR|y. blithely r|
+1127|175|6|4|7|7526.19|0.07|0.05|N|O|1995-11-05|1995-11-02|1995-11-11|DELIVER IN PERSON|FOB| idly pending pains |
+1152|9|10|1|23|20907.00|0.06|0.04|A|F|1994-10-14|1994-10-22|1994-10-21|DELIVER IN PERSON|MAIL|equests alongside of the unusual |
+1152|100|2|2|25|25002.50|0.04|0.08|R|F|1994-10-20|1994-09-18|1994-10-28|DELIVER IN PERSON|REG AIR|efully ironic accounts. sly instructions wa|
+1152|42|9|3|6|5652.24|0.07|0.03|A|F|1994-12-07|1994-11-05|1994-12-25|DELIVER IN PERSON|FOB|p furiously; packages above th|
+1153|86|7|1|15|14791.20|0.00|0.08|N|O|1996-04-24|1996-07-17|1996-04-29|TAKE BACK RETURN|SHIP|uctions boost fluffily according to|
+1153|169|8|2|50|53458.00|0.00|0.07|N|O|1996-06-27|1996-07-13|1996-07-05|COLLECT COD|REG AIR|ronic asymptotes nag slyly. |
+1153|44|5|3|25|23601.00|0.00|0.05|N|O|1996-06-18|1996-06-28|1996-07-09|NONE|TRUCK| theodolites|
+1153|92|3|4|43|42659.87|0.01|0.00|N|O|1996-06-09|1996-06-01|1996-07-04|DELIVER IN PERSON|MAIL|special instructions are. unusual, final du|
+1153|142|5|5|45|46896.30|0.00|0.02|N|O|1996-06-18|1996-06-20|1996-07-03|TAKE BACK RETURN|AIR|oss the ex|
+1153|136|7|6|26|26939.38|0.02|0.03|N|O|1996-08-16|1996-07-12|1996-09-08|NONE|MAIL|kages haggle carefully. f|
+1153|192|4|7|5|5460.95|0.02|0.03|N|O|1996-05-03|1996-06-12|1996-05-28|TAKE BACK RETURN|FOB|special excuses promi|
+1154|143|10|1|31|32337.34|0.06|0.06|A|F|1992-04-17|1992-04-26|1992-05-17|COLLECT COD|AIR|ithely. final, blithe |
+1154|148|7|2|50|52407.00|0.07|0.06|A|F|1992-04-22|1992-04-21|1992-05-01|NONE|TRUCK|ove the furiously bold Tires|
+1154|97|1|3|5|4985.45|0.09|0.04|A|F|1992-06-07|1992-05-07|1992-07-05|DELIVER IN PERSON|MAIL|the furiously |
+1154|1|2|4|35|31535.00|0.00|0.07|A|F|1992-03-30|1992-04-02|1992-04-21|DELIVER IN PERSON|TRUCK|the carefully regular pinto beans boost|
+1154|36|2|5|18|16848.54|0.02|0.03|A|F|1992-02-26|1992-03-24|1992-03-20|TAKE BACK RETURN|REG AIR|y regular excuses cajole blithely. fi|
+1154|196|8|6|50|54809.50|0.06|0.03|A|F|1992-03-04|1992-04-01|1992-04-01|TAKE BACK RETURN|TRUCK| even, special |
+1155|70|1|1|4|3880.28|0.09|0.05|N|O|1997-10-19|1997-12-09|1997-11-02|DELIVER IN PERSON|SHIP|ic foxes according to the carefully final |
+1155|196|9|2|39|42751.41|0.08|0.05|N|O|1998-01-29|1998-01-03|1998-02-01|COLLECT COD|TRUCK|ckly final pinto beans was.|
+1155|147|4|3|23|24084.22|0.08|0.03|N|O|1997-11-24|1997-11-28|1997-12-06|DELIVER IN PERSON|FOB|ly unusual packages. iro|
+1155|140|1|4|12|12481.68|0.01|0.06|N|O|1997-11-01|1998-01-03|1997-11-19|DELIVER IN PERSON|RAIL|packages do|
+1155|5|2|5|49|44345.00|0.04|0.08|N|O|1997-12-07|1997-12-30|1997-12-08|NONE|AIR|ccounts are alongside of t|
+1156|87|8|1|15|14806.20|0.07|0.06|N|O|1996-12-21|1997-01-03|1997-01-10|TAKE BACK RETURN|AIR|the furiously pen|
+1156|33|4|2|21|19593.63|0.02|0.08|N|O|1996-11-07|1997-01-14|1996-12-03|NONE|AIR|dolphins. fluffily ironic packages sleep re|
+1156|12|2|3|29|26448.29|0.09|0.06|N|O|1997-01-24|1996-12-26|1997-02-04|DELIVER IN PERSON|TRUCK|ts sleep sly|
+1156|172|3|4|42|45031.14|0.02|0.00|N|O|1997-01-18|1997-01-12|1997-02-13|NONE|REG AIR|s. quickly bold pains are|
+1156|74|4|5|49|47729.43|0.04|0.01|N|O|1996-11-16|1996-12-02|1996-12-05|COLLECT COD|AIR|ithely unusual in|
+1156|195|9|6|42|45997.98|0.02|0.06|N|O|1997-01-27|1997-01-09|1997-01-28|DELIVER IN PERSON|MAIL|even requests boost ironic deposits. pe|
+1156|47|6|7|20|18940.80|0.08|0.07|N|O|1997-01-01|1997-01-06|1997-01-16|COLLECT COD|MAIL|deposits sleep bravel|
+1157|49|2|1|16|15184.64|0.06|0.00|N|O|1998-04-12|1998-03-09|1998-04-23|DELIVER IN PERSON|MAIL|tions hang|
+1157|83|4|2|4|3932.32|0.10|0.05|N|O|1998-02-24|1998-03-30|1998-03-24|DELIVER IN PERSON|SHIP|ounts. ironic deposits|
+1157|48|7|3|8|7584.32|0.02|0.00|N|O|1998-03-25|1998-03-16|1998-03-29|NONE|REG AIR|blithely even pa|
+1157|77|8|4|46|44945.22|0.07|0.08|N|O|1998-04-19|1998-03-13|1998-04-23|NONE|FOB|slyly regular excuses. accounts|
+1157|160|5|5|14|14842.24|0.03|0.03|N|O|1998-04-17|1998-03-03|1998-05-01|NONE|FOB|theodolites. fluffily re|
+1158|45|2|1|5|4725.20|0.02|0.04|N|O|1996-10-20|1996-07-30|1996-11-14|COLLECT COD|AIR|symptotes along the care|
+1158|157|9|2|23|24314.45|0.00|0.08|N|O|1996-10-21|1996-08-19|1996-10-31|COLLECT COD|MAIL|ularly ironic requests use care|
+1159|109|10|1|39|39354.90|0.01|0.00|A|F|1992-11-20|1992-10-28|1992-12-18|TAKE BACK RETURN|FOB| blithely express reques|
+1159|96|9|2|7|6972.63|0.08|0.00|A|F|1992-11-25|1992-10-27|1992-12-20|NONE|AIR|olve somet|
+1159|98|10|3|11|10978.99|0.10|0.03|R|F|1992-12-09|1992-12-07|1992-12-18|DELIVER IN PERSON|MAIL|h furiousl|
+1184|47|4|1|27|25570.08|0.01|0.00|N|O|1998-01-10|1997-12-02|1998-02-06|TAKE BACK RETURN|REG AIR|s wake fluffily. fl|
+1184|147|10|2|4|4188.56|0.04|0.03|N|O|1997-12-25|1998-01-24|1998-01-18|DELIVER IN PERSON|RAIL| express packages. slyly expres|
+1184|164|5|3|7|7449.12|0.05|0.00|N|O|1998-02-14|1998-01-06|1998-03-11|COLLECT COD|TRUCK|ckly warthogs. blithely bold foxes hag|
+1184|126|9|4|3|3078.36|0.02|0.05|N|O|1998-01-15|1997-12-19|1998-02-02|NONE|REG AIR|ar packages. final packages cajol|
+1185|72|1|1|8|7776.56|0.01|0.06|A|F|1992-12-05|1992-10-05|1992-12-28|DELIVER IN PERSON|MAIL|ely according to the furiously regular r|
+1185|31|2|2|28|26068.84|0.07|0.06|A|F|1992-09-24|1992-10-07|1992-10-10|DELIVER IN PERSON|REG AIR|ke. slyly regular t|
+1185|190|1|3|12|13082.28|0.05|0.06|R|F|1992-10-12|1992-09-26|1992-11-11|NONE|REG AIR|instructions. daringly pend|
+1186|3|4|1|28|25284.00|0.08|0.07|N|O|1996-12-08|1996-10-17|1996-12-15|TAKE BACK RETURN|TRUCK|ffily spec|
+1186|92|5|2|11|10912.99|0.07|0.05|N|O|1996-10-03|1996-10-21|1996-10-17|DELIVER IN PERSON|AIR|s haggle furiously; slyl|
+1186|101|2|3|20|20022.00|0.07|0.07|N|O|1996-08-20|1996-10-23|1996-09-05|COLLECT COD|FOB|ely alongside of the blithel|
+1186|106|7|4|27|27164.70|0.06|0.04|N|O|1996-10-08|1996-11-06|1996-10-09|TAKE BACK RETURN|SHIP|accounts. express, e|
+1187|178|6|1|29|31266.93|0.01|0.04|R|F|1992-12-10|1993-02-09|1992-12-29|TAKE BACK RETURN|RAIL|riously express ac|
+1187|131|7|2|15|15466.95|0.03|0.04|A|F|1992-12-22|1993-01-13|1993-01-01|NONE|TRUCK|ests. foxes wake. carefu|
+1187|78|8|3|40|39122.80|0.08|0.06|R|F|1993-03-05|1992-12-31|1993-03-12|NONE|TRUCK|ar, brave deposits nag blithe|
+1188|115|9|1|2|2030.22|0.00|0.04|N|O|1996-05-22|1996-05-23|1996-06-06|COLLECT COD|RAIL|its breach blit|
+1188|113|4|2|9|9117.99|0.01|0.08|N|O|1996-08-04|1996-06-04|1996-08-19|NONE|REG AIR|ow carefully ironic d|
+1188|179|10|3|41|44245.97|0.07|0.04|N|O|1996-06-29|1996-05-21|1996-07-21|TAKE BACK RETURN|TRUCK|althy packages. fluffily unusual ideas h|
+1189|51|2|1|23|21874.15|0.06|0.00|R|F|1994-07-25|1994-06-07|1994-08-02|COLLECT COD|FOB|s. fluffy Tiresias run quickly. bra|
+1189|105|2|2|32|32163.20|0.09|0.02|R|F|1994-05-06|1994-07-03|1994-05-15|TAKE BACK RETURN|FOB|e regular deposits. quickly quiet deposi|
+1189|57|5|3|22|21055.10|0.05|0.03|R|F|1994-06-09|1994-06-29|1994-06-23|DELIVER IN PERSON|TRUCK|quickly unusual platelets lose forges. ca|
+1190|84|5|1|32|31490.56|0.07|0.06|N|O|1997-05-08|1997-04-17|1997-06-01|COLLECT COD|FOB|y final packages? slyly even|
+1191|49|6|1|29|27522.16|0.00|0.04|N|O|1996-01-24|1996-01-28|1996-02-17|COLLECT COD|AIR| regular pin|
+1216|97|1|1|8|7976.72|0.03|0.04|R|F|1993-02-01|1993-03-06|1993-02-08|TAKE BACK RETURN|TRUCK| of the carefully express|
+1216|75|3|2|48|46803.36|0.10|0.01|R|F|1993-01-17|1993-02-01|1993-02-13|COLLECT COD|SHIP|symptotes use against th|
+1216|42|3|3|18|16956.72|0.00|0.03|A|F|1993-01-20|1993-01-28|1993-02-02|COLLECT COD|MAIL|y final packages nod |
+1217|60|5|1|45|43202.70|0.07|0.02|A|F|1992-07-01|1992-06-23|1992-07-06|COLLECT COD|AIR|riously close ideas|
+1218|140|6|1|16|16642.24|0.04|0.07|A|F|1994-06-26|1994-08-07|1994-06-30|TAKE BACK RETURN|FOB|ven realms be|
+1218|94|6|2|41|40757.69|0.06|0.06|R|F|1994-08-04|1994-08-05|1994-08-11|TAKE BACK RETURN|SHIP|dolphins. theodolites beyond th|
+1218|48|7|3|44|41713.76|0.07|0.06|A|F|1994-10-05|1994-09-03|1994-10-30|COLLECT COD|TRUCK|thely ironic accounts wake slyly|
+1218|42|9|4|1|942.04|0.01|0.08|R|F|1994-09-15|1994-09-07|1994-10-03|COLLECT COD|TRUCK|press furio|
+1219|132|3|1|6|6192.78|0.08|0.04|N|O|1995-11-13|1995-12-24|1995-11-18|NONE|MAIL|pecial, ironic requ|
+1219|129|4|2|4|4116.48|0.01|0.04|N|O|1995-11-24|1995-11-22|1995-12-07|TAKE BACK RETURN|SHIP|lly quick requests. blithely even h|
+1220|169|4|1|25|26729.00|0.10|0.03|N|O|1996-10-15|1996-11-07|1996-11-06|COLLECT COD|REG AIR| regular orbi|
+1220|160|5|2|36|38165.76|0.01|0.02|N|O|1996-12-10|1996-11-14|1997-01-07|COLLECT COD|SHIP|ar packages. blithely final acc|
+1220|37|8|3|3|2811.09|0.08|0.06|N|O|1996-09-06|1996-11-03|1996-09-10|COLLECT COD|REG AIR| final theodolites. blithely silent |
+1220|6|1|4|36|32616.00|0.07|0.03|N|O|1996-12-12|1996-10-03|1996-12-15|TAKE BACK RETURN|TRUCK|unusual, silent pinto beans aga|
+1220|49|2|5|25|23726.00|0.03|0.08|N|O|1996-09-11|1996-10-09|1996-09-25|DELIVER IN PERSON|RAIL|packages affi|
+1221|81|2|1|43|42186.44|0.05|0.05|R|F|1992-06-22|1992-07-15|1992-07-20|DELIVER IN PERSON|FOB|y slyly above the slyly unusual ideas|
+1221|170|1|2|12|12842.04|0.00|0.08|R|F|1992-08-07|1992-06-24|1992-08-13|COLLECT COD|AIR|yly ironic |
+1221|69|6|3|3|2907.18|0.10|0.08|R|F|1992-07-01|1992-06-04|1992-07-27|COLLECT COD|TRUCK|ing to the fluffily|
+1221|120|10|4|41|41824.92|0.06|0.02|A|F|1992-04-28|1992-07-02|1992-05-19|NONE|RAIL|ns. bold deposit|
+1221|108|1|5|13|13105.30|0.10|0.00|R|F|1992-08-01|1992-06-29|1992-08-27|TAKE BACK RETURN|AIR|ajole furiously. blithely expres|
+1221|85|6|6|7|6895.56|0.08|0.06|A|F|1992-06-27|1992-06-16|1992-07-23|TAKE BACK RETURN|RAIL|xpress accounts |
+1222|72|10|1|12|11664.84|0.09|0.02|A|F|1993-02-12|1993-03-14|1993-03-12|TAKE BACK RETURN|RAIL|s print permanently unusual packages. |
+1222|159|7|2|12|12709.80|0.08|0.01|A|F|1993-05-05|1993-03-27|1993-05-18|TAKE BACK RETURN|REG AIR| furiously bold instructions|
+1222|8|1|3|26|23608.00|0.02|0.08|R|F|1993-02-13|1993-03-20|1993-02-22|TAKE BACK RETURN|MAIL|, even accounts are ironic|
+1223|100|1|1|28|28002.80|0.10|0.06|N|O|1996-08-07|1996-07-24|1996-08-13|TAKE BACK RETURN|MAIL| quickly ironic requests. furious|
+1248|164|5|1|45|47887.20|0.00|0.08|A|F|1992-04-17|1992-03-31|1992-05-13|NONE|RAIL|ter the pending pl|
+1248|151|9|2|37|38892.55|0.06|0.06|R|F|1992-01-26|1992-02-05|1992-02-13|COLLECT COD|TRUCK|. final requests integrate quickly. blit|
+1248|56|8|3|26|24857.30|0.09|0.06|A|F|1992-01-16|1992-03-01|1992-02-06|TAKE BACK RETURN|AIR| ironic dependen|
+1248|156|7|4|49|51751.35|0.02|0.01|A|F|1992-04-24|1992-02-18|1992-05-03|TAKE BACK RETURN|AIR|beans run quickly according to the carefu|
+1248|122|7|5|20|20442.40|0.08|0.00|A|F|1992-03-12|1992-03-23|1992-04-07|TAKE BACK RETURN|AIR|nal foxes cajole carefully slyl|
+1248|62|9|6|30|28861.80|0.10|0.01|R|F|1992-02-01|1992-03-24|1992-02-08|TAKE BACK RETURN|MAIL|fily special foxes kindle am|
+1249|59|4|1|49|46993.45|0.07|0.05|A|F|1994-03-03|1994-02-28|1994-03-08|NONE|RAIL|ffily express theodo|
+1250|2|3|1|15|13530.00|0.10|0.06|A|F|1992-11-05|1992-12-17|1992-12-03|TAKE BACK RETURN|SHIP| regular, i|
+1251|4|5|1|37|33448.00|0.08|0.08|N|O|1997-12-21|1998-01-12|1997-12-26|COLLECT COD|AIR|. furiously|
+1251|78|9|2|36|35210.52|0.07|0.04|N|O|1997-11-29|1998-01-07|1997-12-03|TAKE BACK RETURN|RAIL|y ironic Tiresias are slyly furio|
+1251|99|3|3|37|36966.33|0.09|0.02|N|O|1998-01-11|1997-12-01|1998-01-23|DELIVER IN PERSON|RAIL|finally bold requests|
+1251|150|9|4|7|7351.05|0.07|0.00|N|O|1998-01-08|1997-12-27|1998-01-18|COLLECT COD|MAIL|riously pe|
+1251|188|9|5|1|1088.18|0.02|0.03|N|O|1997-12-08|1998-01-06|1998-01-01|DELIVER IN PERSON|REG AIR| use quickly final packages. iron|
+1252|87|8|1|13|12832.04|0.10|0.01|N|O|1997-09-07|1997-09-12|1997-10-01|COLLECT COD|REG AIR|sts dazzle|
+1252|111|8|2|27|27299.97|0.00|0.08|N|O|1997-10-22|1997-10-10|1997-11-10|TAKE BACK RETURN|REG AIR|packages hag|
+1252|40|1|3|19|17860.76|0.07|0.02|N|O|1997-10-13|1997-10-23|1997-10-18|NONE|AIR|ts wake carefully-- packages sleep. quick |
+1252|92|4|4|11|10912.99|0.10|0.01|N|O|1997-10-16|1997-09-22|1997-10-28|COLLECT COD|AIR|s are. slyly final requests among the|
+1252|79|10|5|26|25455.82|0.05|0.05|N|O|1997-08-05|1997-10-24|1997-08-07|DELIVER IN PERSON|SHIP|onic pinto beans haggle furiously |
+1253|180|8|1|14|15122.52|0.00|0.06|R|F|1993-04-03|1993-04-16|1993-04-27|TAKE BACK RETURN|MAIL|lar foxes sleep furiously final, final pack|
+1253|54|9|2|13|12402.65|0.01|0.06|A|F|1993-03-05|1993-04-26|1993-03-08|DELIVER IN PERSON|FOB|al packages|
+1253|70|1|3|22|21341.54|0.05|0.06|A|F|1993-02-23|1993-04-06|1993-03-07|TAKE BACK RETURN|SHIP|telets cajole alongside of the final reques|
+1253|176|5|4|23|24751.91|0.09|0.02|R|F|1993-04-18|1993-04-18|1993-05-07|COLLECT COD|FOB| the slyly silent re|
+1253|114|8|5|19|19268.09|0.05|0.05|A|F|1993-04-01|1993-04-22|1993-04-14|TAKE BACK RETURN|AIR|al pinto bea|
+1254|193|5|1|6|6559.14|0.08|0.01|N|O|1996-02-02|1996-03-21|1996-02-29|NONE|REG AIR|lithely even deposits eat!|
+1254|200|3|2|47|51709.40|0.05|0.06|N|O|1996-03-07|1996-02-20|1996-04-05|COLLECT COD|MAIL| platelets cajol|
+1254|135|6|3|35|36229.55|0.05|0.06|N|O|1996-04-08|1996-02-29|1996-04-18|DELIVER IN PERSON|FOB|ckages boost. furious warhorses cajole|
+1255|192|4|1|12|13106.28|0.00|0.02|A|F|1994-08-17|1994-06-29|1994-09-04|TAKE BACK RETURN|REG AIR| regular, express accounts are |
+1255|194|8|2|46|50332.74|0.07|0.05|R|F|1994-07-06|1994-07-14|1994-08-05|NONE|MAIL|ons nag qui|
+1280|129|8|1|17|17495.04|0.01|0.01|A|F|1993-02-04|1993-04-10|1993-02-07|NONE|FOB|ructions integrate across the th|
+1280|189|10|2|6|6535.08|0.05|0.06|R|F|1993-03-30|1993-02-16|1993-04-18|DELIVER IN PERSON|AIR|gular deposits |
+1280|33|4|3|13|12129.39|0.03|0.02|R|F|1993-03-06|1993-03-11|1993-03-18|DELIVER IN PERSON|TRUCK|blithely final accounts use evenly |
+1280|175|3|4|5|5375.85|0.06|0.03|R|F|1993-02-03|1993-02-11|1993-02-23|DELIVER IN PERSON|AIR|beans haggle. quickly bold instructions h|
+1280|52|10|5|24|22849.20|0.07|0.02|R|F|1993-03-20|1993-03-01|1993-04-09|COLLECT COD|RAIL|y pending orbits boost after the slyly|
+1280|66|3|6|9|8694.54|0.00|0.05|R|F|1993-04-18|1993-03-28|1993-05-04|DELIVER IN PERSON|FOB|usual accou|
+1280|92|6|7|19|18849.71|0.02|0.06|A|F|1993-02-07|1993-02-28|1993-02-12|NONE|TRUCK|lyly along the furiously regular |
+1281|138|4|1|33|34258.29|0.07|0.08|R|F|1995-02-01|1995-01-18|1995-03-03|NONE|REG AIR|dencies. thinly final pinto beans wake|
+1281|7|2|2|37|33559.00|0.08|0.03|A|F|1995-03-19|1995-02-02|1995-03-27|NONE|AIR|ounts detect|
+1281|94|7|3|2|1988.18|0.05|0.06|A|F|1994-12-27|1995-01-26|1995-01-21|TAKE BACK RETURN|FOB|ly unusual requests. final reques|
+1281|154|2|4|38|40057.70|0.04|0.06|R|F|1995-03-28|1995-01-11|1995-04-14|TAKE BACK RETURN|MAIL| ideas-- blithely regular|
+1281|152|10|5|13|13677.95|0.03|0.07|A|F|1995-02-06|1995-02-13|1995-02-18|DELIVER IN PERSON|TRUCK|fully final platelets wa|
+1281|50|9|6|4|3800.20|0.07|0.04|R|F|1995-03-15|1995-02-21|1995-03-20|NONE|SHIP|ggle against the even requests. requests |
+1281|78|6|7|43|42057.01|0.10|0.02|R|F|1995-01-28|1995-02-08|1995-02-10|DELIVER IN PERSON|AIR|final accounts. final packages slee|
+1282|23|4|1|14|12922.28|0.04|0.02|R|F|1992-06-29|1992-04-05|1992-07-21|TAKE BACK RETURN|REG AIR|ecial deposit|
+1282|30|9|2|10|9300.30|0.09|0.06|R|F|1992-04-10|1992-04-16|1992-05-01|DELIVER IN PERSON|SHIP|r theodolite|
+1282|160|1|3|19|20143.04|0.01|0.03|R|F|1992-05-07|1992-04-07|1992-05-13|NONE|RAIL|ts x-ray across the furi|
+1282|59|10|4|19|18221.95|0.00|0.05|A|F|1992-06-20|1992-04-17|1992-07-05|DELIVER IN PERSON|REG AIR|nto beans. carefully close theodo|
+1283|93|7|1|47|46675.23|0.05|0.03|N|O|1996-10-21|1996-10-29|1996-11-12|DELIVER IN PERSON|TRUCK|even instructions boost slyly blithely |
+1283|106|1|2|1|1006.10|0.00|0.08|N|O|1996-10-07|1996-10-12|1996-10-08|NONE|RAIL|d the sauternes. slyly ev|
+1283|138|4|3|18|18686.34|0.02|0.01|N|O|1996-10-14|1996-11-07|1996-10-22|DELIVER IN PERSON|AIR|equests use along the fluff|
+1283|192|4|4|40|43687.60|0.07|0.03|N|O|1996-11-09|1996-11-23|1996-11-28|NONE|MAIL|riously. even, ironic instructions after|
+1283|124|9|5|43|44037.16|0.01|0.04|N|O|1996-09-29|1996-11-19|1996-10-26|TAKE BACK RETURN|RAIL|requests sleep slyly about the |
+1283|8|5|6|30|27240.00|0.06|0.07|N|O|1996-11-22|1996-11-22|1996-12-15|COLLECT COD|TRUCK|t the fluffily|
+1283|197|8|7|21|23040.99|0.04|0.03|N|O|1996-09-12|1996-10-02|1996-10-12|NONE|REG AIR|fully regular |
+1284|178|7|1|49|52830.33|0.00|0.06|N|O|1996-04-11|1996-03-04|1996-04-16|NONE|MAIL|lar packages. special packages ac|
+1284|6|7|2|4|3624.00|0.07|0.06|N|O|1996-02-29|1996-02-11|1996-03-01|TAKE BACK RETURN|TRUCK| regular asymptotes. |
+1284|133|4|3|39|40292.07|0.08|0.00|N|O|1996-01-11|1996-02-07|1996-02-05|COLLECT COD|MAIL|even accoun|
+1284|59|10|4|1|959.05|0.01|0.07|N|O|1996-04-28|1996-04-02|1996-05-08|DELIVER IN PERSON|SHIP|al packages use carefully express de|
+1284|34|5|5|9|8406.27|0.05|0.06|N|O|1996-03-03|1996-03-19|1996-04-01|DELIVER IN PERSON|REG AIR|after the pending|
+1285|22|3|1|12|11064.24|0.00|0.06|A|F|1992-06-21|1992-08-16|1992-07-12|COLLECT COD|MAIL|ss foxes. blithe theodolites cajole slyly|
+1285|143|10|2|45|46941.30|0.01|0.02|R|F|1992-09-05|1992-08-08|1992-10-02|COLLECT COD|REG AIR| special requests haggle blithely.|
+1285|189|10|3|4|4356.72|0.09|0.06|A|F|1992-07-20|1992-08-17|1992-07-26|DELIVER IN PERSON|FOB|l packages sleep slyly quiet i|
+1285|188|9|4|39|42439.02|0.05|0.01|A|F|1992-09-15|1992-08-05|1992-10-05|DELIVER IN PERSON|TRUCK|uctions. car|
+1285|84|5|5|33|32474.64|0.00|0.08|R|F|1992-09-08|1992-08-25|1992-09-16|NONE|SHIP|ites affix|
+1286|178|9|1|49|52830.33|0.08|0.01|R|F|1993-06-24|1993-08-12|1993-06-26|DELIVER IN PERSON|SHIP|gged accoun|
+1286|49|6|2|48|45553.92|0.01|0.04|A|F|1993-07-11|1993-07-11|1993-08-01|COLLECT COD|TRUCK|unts alongs|
+1286|189|10|3|11|11980.98|0.03|0.04|R|F|1993-08-08|1993-07-30|1993-09-05|DELIVER IN PERSON|FOB| slyly even packages. requ|
+1286|184|5|4|37|40114.66|0.00|0.02|R|F|1993-05-27|1993-07-11|1993-06-01|COLLECT COD|SHIP|lyly ironic pinto beans cajole furiously s|
+1286|165|10|5|14|14912.24|0.00|0.01|R|F|1993-05-23|1993-08-09|1993-06-01|NONE|REG AIR|blithely bo|
+1286|146|5|6|41|42891.74|0.04|0.05|R|F|1993-08-02|1993-08-06|1993-08-07|TAKE BACK RETURN|FOB| the furiously expre|
+1287|174|3|1|35|37595.95|0.09|0.06|A|F|1994-09-07|1994-09-12|1994-09-30|TAKE BACK RETURN|FOB|s wake unusual grou|
+1287|95|8|2|10|9950.90|0.08|0.03|R|F|1994-07-08|1994-08-28|1994-07-10|TAKE BACK RETURN|RAIL|thely alongside of the unusual, ironic pa|
+1287|1|2|3|30|27030.00|0.00|0.07|R|F|1994-07-12|1994-09-23|1994-08-07|NONE|RAIL|ar packages. even, even|
+1287|62|7|4|10|9620.60|0.01|0.05|A|F|1994-09-03|1994-08-12|1994-09-16|TAKE BACK RETURN|REG AIR|ding, regular accounts|
+1287|179|8|5|21|22662.57|0.06|0.02|A|F|1994-10-06|1994-09-25|1994-10-16|TAKE BACK RETURN|TRUCK|y quickly bold theodoli|
+1287|21|10|6|26|23946.52|0.03|0.08|R|F|1994-10-03|1994-09-27|1994-10-30|DELIVER IN PERSON|RAIL|egular foxes. theodolites nag along t|
+1312|81|2|1|9|8829.72|0.04|0.08|R|F|1994-07-19|1994-06-29|1994-07-24|TAKE BACK RETURN|MAIL|. furiously |
+1312|136|7|2|28|29011.64|0.06|0.06|A|F|1994-09-09|1994-08-01|1994-10-02|TAKE BACK RETURN|FOB|uriously final frays should use quick|
+1312|173|1|3|18|19317.06|0.03|0.07|A|F|1994-09-13|1994-07-08|1994-09-22|TAKE BACK RETURN|MAIL|. slyly ironic|
+1313|52|4|1|48|45698.40|0.01|0.03|A|F|1994-12-20|1994-10-29|1995-01-07|COLLECT COD|MAIL|s are quick|
+1314|198|10|1|5|5490.95|0.03|0.01|A|F|1994-05-26|1994-08-06|1994-05-31|TAKE BACK RETURN|AIR|equests nag across the furious|
+1314|110|5|2|39|39394.29|0.01|0.03|R|F|1994-08-09|1994-06-14|1994-08-31|TAKE BACK RETURN|TRUCK| unusual accounts slee|
+1314|41|2|3|11|10351.44|0.01|0.04|A|F|1994-05-16|1994-07-30|1994-05-31|COLLECT COD|REG AIR|tegrate furious|
+1315|96|8|1|27|26894.43|0.01|0.03|N|O|1998-07-04|1998-06-13|1998-07-28|NONE|SHIP|latelets. fluffily ironic account|
+1315|16|6|2|15|13740.15|0.05|0.01|N|O|1998-07-12|1998-06-10|1998-08-07|COLLECT COD|AIR|. foxes integrate carefully special|
+1315|168|3|3|25|26704.00|0.01|0.08|N|O|1998-06-26|1998-06-10|1998-07-06|TAKE BACK RETURN|FOB|lites. unusual foxes affi|
+1315|161|6|4|19|20162.04|0.02|0.05|N|O|1998-07-05|1998-05-23|1998-08-04|TAKE BACK RETURN|SHIP|nal, regular warhorses about the fu|
+1315|159|7|5|32|33892.80|0.10|0.05|N|O|1998-03-30|1998-06-12|1998-04-25|NONE|SHIP|neath the final p|
+1316|127|6|1|46|47247.52|0.05|0.04|A|F|1994-01-13|1994-01-24|1994-02-03|COLLECT COD|TRUCK|ges haggle of the|
+1316|79|9|2|15|14686.05|0.02|0.01|R|F|1994-03-12|1994-03-02|1994-03-14|COLLECT COD|FOB|se. furiously final depo|
+1316|198|9|3|33|36240.27|0.10|0.06|R|F|1994-03-31|1994-01-23|1994-04-20|TAKE BACK RETURN|AIR|manently; blithely special deposits|
+1316|66|3|4|15|14490.90|0.00|0.06|R|F|1993-12-17|1994-02-04|1993-12-20|NONE|RAIL|fully express dugouts. furiously silent ide|
+1316|41|2|5|40|37641.60|0.01|0.03|R|F|1994-02-04|1994-02-09|1994-02-27|NONE|REG AIR|l dugouts. co|
+1316|4|7|6|7|6328.00|0.05|0.04|A|F|1993-12-09|1994-01-12|1993-12-30|TAKE BACK RETURN|MAIL|. furiously even accounts a|
+1316|163|8|7|8|8505.28|0.10|0.04|A|F|1994-03-26|1994-02-08|1994-04-19|NONE|SHIP|packages against the express requests wa|
+1317|134|5|1|34|35160.42|0.08|0.04|N|O|1995-08-13|1995-08-08|1995-09-10|COLLECT COD|RAIL|deposits boost thinly blithely final id|
+1317|160|2|2|7|7421.12|0.05|0.01|A|F|1995-06-08|1995-08-03|1995-06-16|TAKE BACK RETURN|SHIP| pinto beans according to the final, pend|
+1317|158|9|3|26|27511.90|0.01|0.02|N|O|1995-07-13|1995-06-26|1995-08-06|COLLECT COD|RAIL|leep along th|
+1317|106|3|4|35|35213.50|0.05|0.02|N|O|1995-07-16|1995-07-07|1995-07-22|TAKE BACK RETURN|FOB|r packages impress blithely car|
+1317|150|9|5|36|37805.40|0.02|0.00|N|O|1995-09-03|1995-07-06|1995-09-04|DELIVER IN PERSON|AIR| deposits. quic|
+1318|114|4|1|24|24338.64|0.08|0.06|N|O|1998-09-27|1998-09-15|1998-10-12|TAKE BACK RETURN|AIR|ual, unusual packages. fluffy, iro|
+1318|46|3|2|26|24597.04|0.01|0.03|N|O|1998-09-26|1998-08-09|1998-10-07|DELIVER IN PERSON|FOB|ly. regular, u|
+1318|129|4|3|31|31902.72|0.01|0.04|N|O|1998-08-25|1998-07-31|1998-08-31|COLLECT COD|AIR|ve the carefully expr|
+1319|61|8|1|21|20182.26|0.03|0.04|N|O|1996-10-05|1996-12-02|1996-10-28|COLLECT COD|FOB|s: carefully express |
+1319|37|8|2|12|11244.36|0.09|0.05|N|O|1996-11-05|1996-12-12|1996-11-29|DELIVER IN PERSON|TRUCK|packages integrate furiously. expres|
+1344|141|4|1|15|15617.10|0.10|0.07|A|F|1992-06-22|1992-06-24|1992-06-23|TAKE BACK RETURN|MAIL|rding to the blithely ironic theodolite|
+1344|190|1|2|29|31615.51|0.09|0.00|A|F|1992-07-17|1992-06-07|1992-07-21|NONE|REG AIR|ffily quiet foxes wake blithely. slyly |
+1345|198|9|1|49|53811.31|0.08|0.00|A|F|1992-12-27|1993-01-23|1993-01-06|NONE|FOB|sly. furiously final accounts are blithely |
+1345|12|9|2|37|33744.37|0.10|0.07|A|F|1992-11-27|1992-12-11|1992-12-07|COLLECT COD|FOB|e slyly express requests. ironic accounts c|
+1345|57|8|3|31|29668.55|0.08|0.07|R|F|1992-12-02|1992-12-29|1992-12-14|COLLECT COD|REG AIR|. slyly silent accounts sublat|
+1346|160|8|1|29|30744.64|0.07|0.05|A|F|1992-08-18|1992-09-15|1992-09-17|TAKE BACK RETURN|REG AIR|the pinto |
+1346|125|6|2|48|49205.76|0.06|0.03|A|F|1992-09-28|1992-07-22|1992-10-13|TAKE BACK RETURN|REG AIR| along the carefully spec|
+1346|54|5|3|13|12402.65|0.10|0.04|A|F|1992-07-22|1992-08-10|1992-08-06|NONE|SHIP|arefully brave deposits into the slyly iro|
+1346|124|5|4|6|6144.72|0.02|0.02|R|F|1992-09-13|1992-07-21|1992-09-27|TAKE BACK RETURN|AIR|inst the furiously final theodolites. caref|
+1346|187|8|5|30|32615.40|0.01|0.07|R|F|1992-10-01|1992-07-22|1992-10-24|NONE|SHIP| nag blithely. unusual, ru|
+1346|16|6|6|45|41220.45|0.02|0.04|A|F|1992-09-11|1992-08-06|1992-09-12|COLLECT COD|FOB|press deposits.|
+1347|81|2|1|45|44148.60|0.02|0.05|N|O|1997-08-24|1997-09-03|1997-09-08|COLLECT COD|AIR|ages wake around t|
+1347|143|6|2|34|35466.76|0.07|0.04|N|O|1997-06-25|1997-09-08|1997-07-24|COLLECT COD|FOB|r packages. f|
+1347|185|6|3|23|24959.14|0.03|0.04|N|O|1997-07-31|1997-08-25|1997-08-21|COLLECT COD|SHIP|ronic pinto beans. express reques|
+1347|113|7|4|28|28367.08|0.01|0.00|N|O|1997-07-30|1997-07-22|1997-08-18|TAKE BACK RETURN|FOB|foxes after the blithely special i|
+1347|65|6|5|9|8685.54|0.01|0.03|N|O|1997-08-28|1997-09-16|1997-09-26|DELIVER IN PERSON|AIR| detect blithely above the fina|
+1347|153|8|6|21|22116.15|0.06|0.04|N|O|1997-10-10|1997-08-16|1997-11-02|NONE|FOB|g pinto beans affix car|
+1347|51|3|7|10|9510.50|0.02|0.07|N|O|1997-07-04|1997-07-23|1997-07-05|DELIVER IN PERSON|SHIP|y ironic pin|
+1348|95|7|1|13|12936.17|0.01|0.01|N|O|1998-04-28|1998-06-05|1998-05-12|TAKE BACK RETURN|SHIP| blithely r|
+1348|22|5|2|41|37802.82|0.07|0.03|N|O|1998-05-02|1998-05-26|1998-05-09|COLLECT COD|RAIL|kages. platelets about the ca|
+1348|199|10|3|40|43967.60|0.07|0.05|N|O|1998-08-14|1998-07-10|1998-08-27|COLLECT COD|AIR|fter the regu|
+1348|98|1|4|2|1996.18|0.01|0.04|N|O|1998-05-30|1998-06-20|1998-06-05|COLLECT COD|MAIL|lly final packages use fluffily express ac|
+1349|181|2|1|1|1081.18|0.06|0.03|N|O|1998-01-07|1998-01-14|1998-02-03|COLLECT COD|REG AIR| express inst|
+1349|118|2|2|45|45814.95|0.03|0.02|N|O|1997-12-24|1998-01-17|1997-12-28|NONE|AIR| ironic, unusual deposits wake carefu|
+1350|54|9|1|21|20035.05|0.04|0.04|A|F|1993-12-17|1993-10-17|1993-12-25|COLLECT COD|REG AIR|lyly above the evenly |
+1350|44|5|2|32|30209.28|0.03|0.00|R|F|1993-11-18|1993-09-30|1993-12-16|COLLECT COD|MAIL|ic, final |
+1351|108|9|1|25|25202.50|0.06|0.04|N|O|1998-06-02|1998-05-25|1998-06-22|COLLECT COD|SHIP|iously regul|
+1376|169|8|1|22|23521.52|0.01|0.03|N|O|1997-08-05|1997-07-08|1997-09-03|NONE|REG AIR|inst the final, pending |
+1377|154|6|1|5|5270.75|0.06|0.05|N|O|1998-05-06|1998-07-08|1998-06-01|TAKE BACK RETURN|FOB| final, final grouches. accoun|
+1377|33|9|2|3|2799.09|0.10|0.04|N|O|1998-04-30|1998-07-02|1998-05-14|DELIVER IN PERSON|REG AIR|yly enticing requ|
+1377|84|5|3|26|25586.08|0.07|0.07|N|O|1998-05-28|1998-06-11|1998-06-25|COLLECT COD|SHIP|egular deposits. quickly regular acco|
+1377|121|4|4|39|39823.68|0.00|0.03|N|O|1998-07-27|1998-07-18|1998-08-13|DELIVER IN PERSON|SHIP|e ironic, regular requests. carefully |
+1377|33|9|5|19|17727.57|0.10|0.00|N|O|1998-06-20|1998-06-27|1998-07-20|NONE|AIR|ught to are bold foxes|
+1377|154|6|6|17|17920.55|0.03|0.04|N|O|1998-06-19|1998-07-20|1998-07-14|NONE|REG AIR|s must have to mold b|
+1378|197|10|1|34|37304.46|0.09|0.07|N|O|1996-07-08|1996-04-23|1996-07-09|COLLECT COD|RAIL|le furiously slyly final accounts. careful|
+1378|124|9|2|18|18434.16|0.05|0.02|N|O|1996-06-19|1996-05-16|1996-06-21|DELIVER IN PERSON|RAIL| theodolites. i|
+1378|73|4|3|11|10703.77|0.10|0.03|N|O|1996-06-07|1996-05-09|1996-07-05|COLLECT COD|TRUCK| blithely express hoc|
+1378|171|2|4|12|12854.04|0.02|0.06|N|O|1996-06-16|1996-05-23|1996-07-09|COLLECT COD|SHIP|notornis. b|
+1378|156|7|5|9|9505.35|0.06|0.05|N|O|1996-04-20|1996-04-13|1996-05-09|COLLECT COD|REG AIR|e carefully. carefully iron|
+1378|194|6|6|29|31731.51|0.05|0.05|N|O|1996-04-15|1996-04-23|1996-05-14|NONE|REG AIR|ual packages are furiously blith|
+1379|73|3|1|13|12649.91|0.04|0.01|N|O|1998-06-08|1998-07-13|1998-06-16|NONE|AIR|ully across the furiously iron|
+1379|118|2|2|50|50905.50|0.07|0.08|N|O|1998-08-31|1998-07-13|1998-09-02|TAKE BACK RETURN|FOB|olphins. ca|
+1379|13|7|3|24|21912.24|0.05|0.02|N|O|1998-07-06|1998-07-09|1998-07-29|DELIVER IN PERSON|MAIL|ages cajole carefully idly express re|
+1380|149|2|1|6|6294.84|0.00|0.04|N|O|1996-08-06|1996-10-01|1996-08-14|NONE|RAIL|e foxes. slyly specia|
+1380|141|4|2|40|41645.60|0.02|0.02|N|O|1996-10-01|1996-08-14|1996-10-20|COLLECT COD|RAIL|ly final frets. ironic,|
+1380|78|9|3|15|14671.05|0.05|0.02|N|O|1996-07-14|1996-08-12|1996-08-03|NONE|FOB|riously ironic foxes aff|
+1380|61|10|4|33|31714.98|0.04|0.07|N|O|1996-08-23|1996-10-01|1996-09-18|TAKE BACK RETURN|SHIP|e ironic, even excuses haggle |
+1381|144|1|1|47|49074.58|0.08|0.04|N|O|1998-09-22|1998-08-12|1998-10-12|DELIVER IN PERSON|AIR|ly ironic deposits|
+1381|34|10|2|12|11208.36|0.07|0.08|N|O|1998-08-13|1998-08-12|1998-08-28|TAKE BACK RETURN|AIR| furiously regular package|
+1382|162|3|1|18|19118.88|0.08|0.03|R|F|1993-08-30|1993-10-19|1993-09-03|DELIVER IN PERSON|AIR|hely regular deposits. fluffy s|
+1382|181|2|2|29|31354.22|0.08|0.04|A|F|1993-10-08|1993-11-11|1993-10-10|COLLECT COD|FOB| haggle: closely even asymptot|
+1382|178|7|3|43|46361.31|0.10|0.04|A|F|1993-09-02|1993-10-06|1993-09-15|DELIVER IN PERSON|AIR|ress deposits. slyly ironic foxes are blit|
+1382|181|2|4|11|11892.98|0.04|0.04|R|F|1993-09-17|1993-09-29|1993-09-21|NONE|SHIP|furiously unusual packages play quickly |
+1382|157|8|5|31|32771.65|0.07|0.03|R|F|1993-10-26|1993-10-15|1993-11-09|TAKE BACK RETURN|FOB|hely regular dependencies. f|
+1382|10|5|6|38|34580.38|0.07|0.07|R|F|1993-11-17|1993-09-28|1993-11-20|COLLECT COD|SHIP|ake pending pinto beans. s|
+1382|23|4|7|5|4615.10|0.07|0.01|R|F|1993-10-02|1993-09-29|1993-10-12|DELIVER IN PERSON|REG AIR|ter the carefully final excuses. blit|
+1383|193|7|1|14|15304.66|0.07|0.06|A|F|1993-08-25|1993-07-09|1993-09-12|DELIVER IN PERSON|RAIL|ole carefully silent requests. car|
+1383|161|10|2|19|20162.04|0.06|0.04|R|F|1993-05-24|1993-07-07|1993-06-14|NONE|AIR|lyly unusual accounts sle|
+1408|148|7|1|29|30396.06|0.03|0.04|N|O|1998-03-12|1998-02-14|1998-03-17|COLLECT COD|MAIL|en accounts grow furiousl|
+1408|173|2|2|7|7512.19|0.05|0.06|N|O|1998-01-14|1998-03-21|1998-01-29|COLLECT COD|AIR|fully final instructions. theodolites ca|
+1408|76|6|3|11|10736.77|0.00|0.03|N|O|1998-04-04|1998-01-29|1998-04-18|NONE|REG AIR|y even accounts thrash care|
+1408|148|5|4|20|20962.80|0.06|0.00|N|O|1998-04-21|1998-01-25|1998-05-12|DELIVER IN PERSON|TRUCK| blithely fluffi|
+1408|170|1|5|41|43876.97|0.02|0.06|N|O|1998-02-25|1998-02-03|1998-03-13|COLLECT COD|REG AIR|ep along the fina|
+1408|134|10|6|42|43433.46|0.05|0.08|N|O|1998-01-30|1998-02-07|1998-02-18|TAKE BACK RETURN|REG AIR|even packages. even accounts cajole|
+1408|55|6|7|26|24831.30|0.00|0.00|N|O|1998-03-19|1998-03-14|1998-04-01|COLLECT COD|RAIL|ic foxes ca|
+1409|99|1|1|23|22979.07|0.01|0.03|A|F|1993-04-18|1993-02-25|1993-05-06|DELIVER IN PERSON|FOB|ions. slyly ironic packages wake quick|
+1409|65|2|2|36|34742.16|0.09|0.02|A|F|1993-01-27|1993-01-31|1993-02-07|COLLECT COD|FOB|ncies sleep carefully r|
+1409|160|1|3|17|18022.72|0.07|0.00|R|F|1993-04-15|1993-03-01|1993-04-29|NONE|REG AIR|pending accounts poach. care|
+1410|121|10|1|15|15316.80|0.06|0.05|N|O|1997-05-25|1997-07-08|1997-06-15|NONE|SHIP| bold packages are fluf|
+1410|179|9|2|18|19425.06|0.03|0.00|N|O|1997-06-03|1997-05-17|1997-06-07|TAKE BACK RETURN|RAIL|gle furiously fluffily regular requests|
+1410|109|4|3|37|37336.70|0.02|0.01|N|O|1997-04-17|1997-06-18|1997-04-19|COLLECT COD|TRUCK|to beans b|
+1410|188|9|4|22|23939.96|0.10|0.00|N|O|1997-07-31|1997-05-17|1997-08-19|TAKE BACK RETURN|RAIL|gular account|
+1410|66|1|5|25|24151.50|0.09|0.02|N|O|1997-05-07|1997-07-10|1997-05-16|NONE|REG AIR|unts haggle against the furiously fina|
+1411|17|7|1|9|8253.09|0.06|0.04|A|F|1995-03-08|1995-03-04|1995-03-11|DELIVER IN PERSON|AIR|accounts. furiou|
+1411|107|8|2|26|26184.60|0.02|0.02|A|F|1995-04-12|1995-01-24|1995-05-03|TAKE BACK RETURN|TRUCK|c packages. |
+1411|27|6|3|37|34299.74|0.00|0.06|A|F|1995-02-27|1995-03-02|1995-03-24|NONE|MAIL|d excuses. furiously final pear|
+1411|200|3|4|20|22004.00|0.01|0.03|R|F|1995-04-06|1995-03-16|1995-04-17|COLLECT COD|FOB|s against the|
+1411|83|4|5|46|45221.68|0.08|0.05|A|F|1995-04-03|1995-01-20|1995-04-05|DELIVER IN PERSON|REG AIR|ly daring instructions|
+1411|77|6|6|30|29312.10|0.09|0.04|A|F|1995-01-12|1995-02-01|1995-01-23|DELIVER IN PERSON|MAIL|ious foxes wake courts. caref|
+1412|58|3|1|37|35447.85|0.06|0.01|A|F|1993-04-10|1993-04-19|1993-04-12|DELIVER IN PERSON|RAIL|hely express excuses are |
+1412|156|1|2|20|21123.00|0.10|0.05|A|F|1993-07-04|1993-05-18|1993-07-22|DELIVER IN PERSON|REG AIR|odolites sleep ironically|
+1412|23|2|3|2|1846.04|0.10|0.07|R|F|1993-04-01|1993-05-03|1993-04-12|DELIVER IN PERSON|REG AIR|s among the requests are a|
+1412|167|8|4|11|11738.76|0.05|0.07|R|F|1993-05-27|1993-05-30|1993-06-07|DELIVER IN PERSON|MAIL|en packages. regular packages dete|
+1412|158|6|5|11|11639.65|0.08|0.06|A|F|1993-03-30|1993-05-25|1993-04-21|NONE|FOB|se slyly. special, unusual accounts nag bl|
+1413|178|9|1|18|19407.06|0.08|0.05|N|O|1997-10-11|1997-08-17|1997-10-25|NONE|FOB|yly bold packages haggle quickly acr|
+1413|165|10|2|49|52192.84|0.07|0.06|N|O|1997-08-28|1997-08-23|1997-09-12|DELIVER IN PERSON|MAIL|nstructions br|
+1413|42|9|3|6|5652.24|0.04|0.02|N|O|1997-09-07|1997-07-30|1997-09-21|TAKE BACK RETURN|MAIL|lithely excuses. f|
+1414|38|4|1|39|36583.17|0.10|0.03|N|O|1995-09-22|1995-09-30|1995-10-07|NONE|MAIL|quickly aro|
+1414|107|8|2|4|4028.40|0.02|0.05|N|O|1995-09-16|1995-11-01|1995-10-02|COLLECT COD|AIR| haggle quickly|
+1415|149|10|1|25|26228.50|0.06|0.00|A|F|1994-09-03|1994-07-12|1994-09-13|DELIVER IN PERSON|RAIL|ect never fluff|
+1440|193|6|1|3|3279.57|0.06|0.01|N|O|1995-10-30|1995-10-17|1995-11-08|COLLECT COD|SHIP|instructions boost. fluffily regul|
+1440|114|4|2|46|46649.06|0.02|0.03|N|O|1995-09-21|1995-10-19|1995-10-19|NONE|RAIL|blithely even instructions. |
+1441|144|7|1|5|5220.70|0.04|0.01|N|O|1997-05-17|1997-05-11|1997-05-30|COLLECT COD|MAIL|egular courts. fluffily even grouches |
+1441|177|7|2|5|5385.85|0.02|0.05|N|O|1997-04-25|1997-04-16|1997-05-23|COLLECT COD|FOB|he quickly enticing pac|
+1441|118|5|3|14|14253.54|0.01|0.03|N|O|1997-06-30|1997-04-29|1997-07-24|DELIVER IN PERSON|REG AIR|special requests ha|
+1441|160|8|4|37|39225.92|0.01|0.00|N|O|1997-04-26|1997-04-27|1997-04-29|NONE|REG AIR|accounts. slyly special dolphins b|
+1441|72|10|5|34|33050.38|0.09|0.00|N|O|1997-06-12|1997-05-11|1997-06-29|TAKE BACK RETURN|RAIL|e carefully. blithely ironic dep|
+1441|25|4|6|15|13875.30|0.09|0.08|N|O|1997-05-21|1997-05-06|1997-06-04|NONE|REG AIR| dependencies-- cour|
+1441|96|10|7|50|49804.50|0.03|0.01|N|O|1997-06-07|1997-05-12|1997-06-08|NONE|SHIP| requests. blithely e|
+1442|26|5|1|8|7408.16|0.05|0.01|A|F|1994-10-31|1994-09-04|1994-11-25|COLLECT COD|AIR|c deposits haggle after the even|
+1443|34|10|1|47|43899.41|0.04|0.06|N|O|1997-02-05|1997-02-02|1997-03-03|NONE|RAIL|carefully ironic requests sl|
+1444|170|5|1|42|44947.14|0.01|0.02|R|F|1994-12-22|1995-03-03|1994-12-31|NONE|SHIP|ly bold packages boost regular ideas. spe|
+1444|57|2|2|34|32539.70|0.04|0.08|A|F|1995-02-22|1995-02-15|1995-03-19|TAKE BACK RETURN|AIR|y. doggedly pend|
+1444|155|3|3|34|35875.10|0.02|0.07|R|F|1994-12-17|1995-01-12|1995-01-03|COLLECT COD|AIR|ular accounts |
+1444|119|6|4|6|6114.66|0.06|0.03|A|F|1995-01-07|1995-03-05|1995-01-17|COLLECT COD|RAIL|al accounts. br|
+1444|20|1|5|35|32200.70|0.02|0.05|A|F|1995-02-25|1995-03-05|1995-03-24|DELIVER IN PERSON|SHIP|aggle furiou|
+1444|33|4|6|42|39187.26|0.00|0.02|A|F|1994-12-16|1995-02-18|1994-12-22|DELIVER IN PERSON|RAIL|ss requests. ironic ideas wake above|
+1444|82|3|7|12|11784.96|0.00|0.03|R|F|1994-12-23|1995-01-15|1995-01-13|COLLECT COD|TRUCK|ly among the bol|
+1445|100|1|1|24|24002.40|0.01|0.00|A|F|1995-02-21|1995-02-22|1995-03-18|DELIVER IN PERSON|SHIP|al accounts use furiously a|
+1445|67|8|2|48|46418.88|0.10|0.02|A|F|1995-02-28|1995-03-16|1995-03-12|COLLECT COD|MAIL|. final ideas are carefully dar|
+1445|192|4|3|7|7645.33|0.10|0.04|A|F|1995-04-25|1995-02-25|1995-05-10|NONE|SHIP|structions: slyly regular re|
+1445|28|1|4|17|15776.34|0.04|0.07|A|F|1995-04-02|1995-04-04|1995-05-01|COLLECT COD|FOB|ges. furiously regular pint|
+1445|135|1|5|24|24843.12|0.10|0.06|R|F|1995-04-23|1995-02-16|1995-05-18|NONE|REG AIR|rate after the carefully reg|
+1445|168|9|6|39|41658.24|0.03|0.02|A|F|1995-02-05|1995-02-20|1995-02-06|NONE|MAIL|ully unusual reques|
+1446|72|3|1|31|30134.17|0.10|0.02|N|O|1998-05-01|1998-05-17|1998-05-30|NONE|REG AIR|. slyly reg|
+1447|167|4|1|19|20276.04|0.06|0.04|A|F|1993-01-31|1992-12-07|1993-02-04|COLLECT COD|MAIL|. quickly ironic |
+1447|32|3|2|6|5592.18|0.01|0.05|A|F|1992-10-24|1992-12-10|1992-11-05|DELIVER IN PERSON|AIR|as! regular packages poach above the|
+1447|39|5|3|9|8451.27|0.04|0.00|R|F|1992-11-15|1993-01-07|1992-11-29|DELIVER IN PERSON|MAIL|counts wake s|
+1447|22|5|4|8|7376.16|0.09|0.08|R|F|1992-11-20|1993-01-12|1992-12-14|COLLECT COD|FOB|ost carefully |
+1447|130|1|5|23|23692.99|0.02|0.07|A|F|1992-12-07|1992-12-25|1993-01-06|TAKE BACK RETURN|AIR| dazzle quickly deposits. f|
+1447|200|3|6|41|45108.20|0.08|0.02|R|F|1993-01-06|1993-01-05|1993-01-13|TAKE BACK RETURN|MAIL|rts boost s|
+1472|8|5|1|36|32688.00|0.04|0.05|N|O|1996-11-06|1996-11-13|1996-11-12|COLLECT COD|SHIP|riously silent deposits to the pending d|
+1472|133|4|2|26|26861.38|0.03|0.05|N|O|1996-11-08|1996-11-13|1996-12-02|DELIVER IN PERSON|FOB|ic packages w|
+1472|1|8|3|6|5406.00|0.08|0.01|N|O|1996-10-24|1996-11-19|1996-11-23|COLLECT COD|FOB|onic theodolites hinder slyly slyly r|
+1473|54|9|1|50|47702.50|0.04|0.03|N|O|1997-05-05|1997-05-20|1997-05-09|NONE|TRUCK|requests wake express deposits. special, ir|
+1473|68|3|2|32|30977.92|0.00|0.08|N|O|1997-04-18|1997-05-12|1997-05-10|DELIVER IN PERSON|REG AIR|out the packages lose furiously ab|
+1474|15|5|1|5|4575.05|0.05|0.04|A|F|1995-04-22|1995-02-20|1995-05-06|COLLECT COD|SHIP|ully final a|
+1474|123|8|2|30|30693.60|0.04|0.02|A|F|1995-03-23|1995-02-11|1995-04-17|DELIVER IN PERSON|TRUCK|usly. evenly express |
+1474|92|5|3|18|17857.62|0.06|0.02|A|F|1995-01-23|1995-03-28|1995-02-03|NONE|RAIL|after the special|
+1475|168|3|1|15|16022.40|0.08|0.06|N|O|1998-02-12|1997-12-17|1998-03-02|TAKE BACK RETURN|SHIP|xpress requests haggle after the final, fi|
+1475|118|9|2|18|18325.98|0.07|0.00|N|O|1998-03-08|1998-01-18|1998-03-10|TAKE BACK RETURN|AIR|al deposits use. ironic packages along the |
+1475|144|1|3|30|31324.20|0.03|0.02|N|O|1998-03-11|1997-12-30|1998-03-15|COLLECT COD|REG AIR| regular theodolites mold across th|
+1475|187|8|4|50|54359.00|0.03|0.05|N|O|1997-12-14|1997-12-13|1997-12-21|COLLECT COD|AIR|. slyly bold re|
+1475|32|3|5|33|30756.99|0.01|0.06|N|O|1998-01-02|1998-01-27|1998-01-11|NONE|FOB|quickly fluffy|
+1475|50|7|6|12|11400.60|0.04|0.04|N|O|1998-01-09|1997-12-30|1998-01-23|NONE|TRUCK|arefully-- excuses sublate|
+1475|112|3|7|23|23278.53|0.02|0.00|N|O|1998-02-13|1998-02-05|1998-03-08|NONE|TRUCK|hely regular hocke|
+1476|31|7|1|20|18620.60|0.02|0.03|N|O|1996-08-11|1996-09-18|1996-08-26|TAKE BACK RETURN|AIR|. bold deposits are carefully amo|
+1477|72|1|1|31|30134.17|0.00|0.06|N|O|1997-12-16|1997-09-30|1997-12-17|COLLECT COD|RAIL| requests. fluffily final |
+1477|110|7|2|8|8080.88|0.09|0.05|N|O|1997-10-25|1997-10-18|1997-11-16|COLLECT COD|MAIL|ironic realms wake unusual, even ac|
+1477|125|6|3|42|43055.04|0.06|0.00|N|O|1997-11-02|1997-11-02|1997-11-20|DELIVER IN PERSON|SHIP|lithely after the ir|
+1477|107|8|4|32|32227.20|0.05|0.08|N|O|1997-09-12|1997-10-26|1997-10-12|TAKE BACK RETURN|AIR|; quickly regula|
+1477|115|6|5|41|41619.51|0.04|0.06|N|O|1997-12-16|1997-10-31|1998-01-12|DELIVER IN PERSON|REG AIR|y. final pearls kindle. accounts |
+1477|69|6|6|49|47483.94|0.06|0.00|N|O|1997-11-18|1997-11-06|1997-11-27|COLLECT COD|FOB|ise according to the sly, bold p|
+1477|120|4|7|33|33663.96|0.06|0.00|N|O|1997-11-12|1997-11-06|1997-11-24|DELIVER IN PERSON|TRUCK|yly regular p|
+1478|34|5|1|21|19614.63|0.00|0.06|N|O|1997-09-20|1997-10-25|1997-10-06|TAKE BACK RETURN|MAIL| fluffily pending acc|
+1479|149|6|1|33|34621.62|0.10|0.01|N|O|1996-03-12|1996-02-28|1996-03-31|DELIVER IN PERSON|FOB| carefully special courts affix. fluff|
+1504|82|3|1|42|41247.36|0.02|0.03|R|F|1992-10-18|1992-10-14|1992-11-10|TAKE BACK RETURN|FOB|ep. carefully ironic excuses haggle quickl|
+1504|103|10|2|22|22068.20|0.04|0.03|A|F|1992-09-09|1992-10-29|1992-09-10|NONE|REG AIR| accounts sleep. furiou|
+1504|178|8|3|9|9703.53|0.07|0.02|R|F|1992-11-02|1992-10-12|1992-11-15|TAKE BACK RETURN|RAIL|y slyly regular courts.|
+1504|115|2|4|10|10151.10|0.04|0.07|A|F|1992-09-22|1992-10-22|1992-10-13|TAKE BACK RETURN|TRUCK|final theodolites. furiously e|
+1504|20|10|5|7|6440.14|0.02|0.00|R|F|1992-11-20|1992-11-23|1992-12-13|COLLECT COD|MAIL|y final packa|
+1505|120|7|1|4|4080.48|0.09|0.00|A|F|1992-12-14|1992-11-11|1993-01-02|COLLECT COD|SHIP|side of the s|
+1505|123|8|2|50|51156.00|0.00|0.02|R|F|1992-11-22|1992-09-24|1992-11-26|TAKE BACK RETURN|FOB|lyly special platelets. requests ar|
+1506|133|4|1|46|47523.98|0.04|0.05|R|F|1993-01-18|1992-11-11|1993-02-09|COLLECT COD|REG AIR|sits whithout the blithely ironic packages|
+1506|114|4|2|30|30423.30|0.07|0.02|A|F|1992-11-22|1992-10-25|1992-12-04|DELIVER IN PERSON|FOB|deposits cajole |
+1506|191|3|3|28|30553.32|0.10|0.06|A|F|1992-09-22|1992-11-19|1992-10-09|TAKE BACK RETURN|AIR| unwind carefully: theodolit|
+1506|28|7|4|37|34336.74|0.00|0.03|R|F|1992-11-04|1992-12-01|1992-11-23|TAKE BACK RETURN|TRUCK|carefully bold dolphins. accounts su|
+1506|195|8|5|15|16427.85|0.05|0.00|R|F|1992-09-24|1992-11-11|1992-10-05|NONE|REG AIR| carefully fluffy packages-- caref|
+1506|50|3|6|38|36101.90|0.05|0.02|R|F|1992-12-02|1992-12-19|1992-12-29|NONE|REG AIR|xpress, regular excuse|
+1506|169|6|7|4|4276.64|0.07|0.00|R|F|1993-01-03|1992-12-06|1993-01-05|COLLECT COD|REG AIR|posits. furiou|
+1507|68|5|1|25|24201.50|0.01|0.08|R|F|1994-01-07|1994-01-06|1994-01-11|NONE|RAIL|xes. slyly busy de|
+1507|40|6|2|33|31021.32|0.04|0.02|A|F|1993-10-29|1993-12-23|1993-11-14|DELIVER IN PERSON|REG AIR| asymptotes nag furiously above t|
+1507|86|7|3|39|38457.12|0.03|0.07|R|F|1993-11-04|1993-12-16|1993-12-03|TAKE BACK RETURN|REG AIR|ly even instructions.|
+1508|51|3|1|16|15216.80|0.02|0.06|N|O|1998-06-21|1998-05-30|1998-07-11|COLLECT COD|MAIL|riously across the ironic, unusua|
+1508|25|4|2|20|18500.40|0.06|0.01|N|O|1998-04-17|1998-06-11|1998-05-17|DELIVER IN PERSON|MAIL|nic platelets. carefully final fra|
+1508|93|7|3|43|42702.87|0.01|0.02|N|O|1998-06-01|1998-06-24|1998-06-03|TAKE BACK RETURN|TRUCK|ndencies h|
+1508|148|7|4|1|1048.14|0.02|0.02|N|O|1998-07-13|1998-06-03|1998-07-17|TAKE BACK RETURN|AIR|s the blithely bold instruction|
+1508|135|6|5|29|30018.77|0.02|0.00|N|O|1998-08-03|1998-07-08|1998-08-22|COLLECT COD|RAIL|r instructions. carefully|
+1508|3|10|6|5|4515.00|0.06|0.08|N|O|1998-05-22|1998-07-06|1998-06-04|COLLECT COD|REG AIR|cording to the furiously ironic depe|
+1508|117|8|7|38|38650.18|0.03|0.06|N|O|1998-04-30|1998-06-23|1998-05-18|DELIVER IN PERSON|RAIL|tes wake furiously regular w|
+1509|28|7|1|14|12992.28|0.04|0.01|A|F|1993-10-04|1993-09-25|1993-10-21|NONE|TRUCK|nal realms|
+1509|11|2|2|46|41906.46|0.08|0.02|A|F|1993-10-15|1993-10-04|1993-11-01|TAKE BACK RETURN|FOB|uriously regula|
+1509|107|8|3|17|17120.70|0.06|0.05|A|F|1993-07-25|1993-08-28|1993-08-19|DELIVER IN PERSON|AIR| furiously. blithely regular ideas haggle c|
+1509|20|4|4|11|10120.22|0.03|0.08|R|F|1993-11-04|1993-10-03|1993-11-14|TAKE BACK RETURN|FOB|ily ironic packages nod carefully.|
+1509|90|1|5|37|36633.33|0.01|0.08|A|F|1993-08-31|1993-09-10|1993-09-24|NONE|FOB|he slyly even deposits wake a|
+1509|187|8|6|31|33702.58|0.04|0.03|A|F|1993-07-14|1993-08-21|1993-08-06|COLLECT COD|SHIP|ic deposits cajole carefully. quickly bold |
+1509|157|2|7|27|28543.05|0.01|0.01|A|F|1993-09-29|1993-09-08|1993-10-04|TAKE BACK RETURN|FOB|lithely after the |
+1510|98|2|1|11|10978.99|0.09|0.04|N|O|1996-09-23|1996-12-03|1996-10-01|DELIVER IN PERSON|RAIL|e of the unusual accounts. stealthy deposit|
+1510|84|5|2|24|23617.92|0.05|0.04|N|O|1996-10-07|1996-10-22|1996-11-03|DELIVER IN PERSON|REG AIR|yly brave theod|
+1510|190|1|3|36|39246.84|0.07|0.02|N|O|1996-10-02|1996-11-23|1996-10-05|NONE|SHIP|old deposits along the carefully|
+1510|182|3|4|8|8657.44|0.01|0.08|N|O|1996-10-26|1996-11-07|1996-10-30|TAKE BACK RETURN|RAIL|blithely express|
+1510|59|10|5|27|25894.35|0.08|0.06|N|O|1996-10-20|1996-12-05|1996-11-02|NONE|MAIL|he blithely regular req|
+1510|14|5|6|3|2742.03|0.05|0.02|N|O|1996-10-31|1996-12-03|1996-11-13|COLLECT COD|RAIL|along the slyly regular pin|
+1510|22|1|7|50|46101.00|0.04|0.05|N|O|1996-11-01|1996-10-17|1996-11-28|NONE|MAIL|even packages. carefully regular fo|
+1511|98|2|1|29|28944.61|0.01|0.04|N|O|1997-03-17|1997-02-11|1997-03-27|DELIVER IN PERSON|AIR|s cajole furiously against |
+1511|62|9|2|32|30785.92|0.04|0.01|N|O|1997-01-06|1997-03-21|1997-01-26|TAKE BACK RETURN|REG AIR| deposits. carefully ironi|
+1536|194|5|1|5|5470.95|0.08|0.03|N|O|1997-02-08|1997-03-11|1997-03-02|COLLECT COD|MAIL|requests sleep pe|
+1537|18|2|1|17|15606.17|0.01|0.03|A|F|1992-04-12|1992-04-19|1992-04-13|NONE|TRUCK|he regular pack|
+1537|179|8|2|50|53958.50|0.08|0.00|R|F|1992-05-30|1992-05-14|1992-06-23|TAKE BACK RETURN|MAIL|special packages haggle slyly at the silent|
+1537|13|4|3|44|40172.44|0.05|0.04|R|F|1992-04-01|1992-03-31|1992-04-21|NONE|TRUCK|lar courts.|
+1537|140|6|4|3|3120.42|0.08|0.07|R|F|1992-03-20|1992-04-14|1992-03-21|TAKE BACK RETURN|SHIP|s, final ideas detect sl|
+1538|102|5|1|32|32067.20|0.05|0.05|N|O|1995-07-08|1995-07-29|1995-08-01|TAKE BACK RETURN|RAIL|uses maintain blithely. fluffily|
+1538|192|3|2|27|29489.13|0.05|0.01|N|O|1995-09-19|1995-08-03|1995-09-24|DELIVER IN PERSON|TRUCK|ngly even packag|
+1538|130|3|3|36|37084.68|0.08|0.04|N|O|1995-07-11|1995-09-10|1995-07-26|DELIVER IN PERSON|MAIL|al deposits mo|
+1538|104|1|4|28|28114.80|0.10|0.04|N|O|1995-09-19|1995-08-27|1995-10-10|COLLECT COD|RAIL|bout the fluffily unusual|
+1538|178|7|5|13|14016.21|0.01|0.05|N|O|1995-06-26|1995-07-30|1995-07-25|NONE|SHIP|ly. packages sleep f|
+1538|128|3|6|42|43181.04|0.08|0.08|N|O|1995-10-10|1995-09-12|1995-11-08|DELIVER IN PERSON|TRUCK|equests cajole blithely |
+1539|196|9|1|21|23019.99|0.08|0.02|R|F|1995-04-19|1995-05-10|1995-04-27|COLLECT COD|TRUCK|ounts haggle. busy|
+1539|86|7|2|11|10846.88|0.01|0.08|A|F|1995-05-27|1995-04-13|1995-06-10|TAKE BACK RETURN|TRUCK|ly express requests. furiously |
+1539|68|5|3|7|6776.42|0.09|0.04|R|F|1995-05-14|1995-04-16|1995-05-30|DELIVER IN PERSON|AIR|. fluffily reg|
+1540|173|1|1|38|40780.46|0.03|0.01|R|F|1992-09-30|1992-10-27|1992-10-12|TAKE BACK RETURN|SHIP| final grouches bo|
+1540|60|2|2|35|33602.10|0.02|0.07|R|F|1992-10-31|1992-09-04|1992-11-05|TAKE BACK RETURN|SHIP|e blithely a|
+1540|8|3|3|25|22700.00|0.08|0.04|R|F|1992-11-15|1992-10-24|1992-12-14|DELIVER IN PERSON|SHIP|ironic deposits amo|
+1540|25|8|4|6|5550.12|0.09|0.03|R|F|1992-08-28|1992-09-17|1992-09-14|COLLECT COD|MAIL|ing to the slyly express asymptote|
+1540|87|8|5|27|26651.16|0.10|0.08|R|F|1992-12-02|1992-10-18|1992-12-31|NONE|SHIP|carefully final packages; b|
+1541|64|3|1|44|42418.64|0.10|0.05|N|O|1995-08-24|1995-07-13|1995-08-26|TAKE BACK RETURN|MAIL|o beans boost fluffily abou|
+1541|26|7|2|8|7408.16|0.10|0.08|N|F|1995-06-05|1995-08-07|1995-06-21|TAKE BACK RETURN|TRUCK|y pending packages. blithely fi|
+1542|58|9|1|37|35447.85|0.07|0.06|A|F|1993-12-15|1993-10-17|1994-01-07|TAKE BACK RETURN|REG AIR|e blithely unusual accounts. quic|
+1542|3|6|2|12|10836.00|0.09|0.06|R|F|1993-10-29|1993-11-02|1993-11-09|TAKE BACK RETURN|RAIL|carefully |
+1542|6|7|3|18|16308.00|0.05|0.05|R|F|1993-10-17|1993-11-15|1993-10-26|TAKE BACK RETURN|FOB|pending instr|
+1542|143|10|4|21|21905.94|0.01|0.05|R|F|1993-10-13|1993-12-13|1993-11-12|NONE|RAIL|y pending foxes nag blithely |
+1542|155|7|5|46|48536.90|0.00|0.00|R|F|1993-09-28|1993-11-03|1993-10-15|COLLECT COD|FOB|ial instructions. ironically|
+1543|71|10|1|34|33016.38|0.02|0.08|N|O|1997-05-25|1997-03-30|1997-06-04|NONE|AIR|ic requests are ac|
+1543|115|9|2|6|6090.66|0.09|0.01|N|O|1997-04-16|1997-05-20|1997-05-16|DELIVER IN PERSON|MAIL| among the carefully bold or|
+1543|67|8|3|42|40616.52|0.06|0.01|N|O|1997-05-26|1997-03-30|1997-06-12|DELIVER IN PERSON|FOB|its sleep until the fur|
+1543|189|10|4|42|45745.56|0.05|0.06|N|O|1997-04-11|1997-04-11|1997-04-23|TAKE BACK RETURN|MAIL|xpress instructions. regular acc|
+1543|40|1|5|9|8460.36|0.08|0.06|N|O|1997-03-14|1997-05-19|1997-03-26|DELIVER IN PERSON|FOB|ravely special requests |
+1543|49|8|6|3|2847.12|0.10|0.04|N|O|1997-03-29|1997-05-10|1997-04-22|COLLECT COD|MAIL|sleep along the furiou|
+1543|68|7|7|3|2904.18|0.00|0.02|N|O|1997-03-22|1997-04-06|1997-03-30|NONE|AIR|quickly. final accounts haggle slyl|
+1568|90|1|1|36|35643.24|0.02|0.03|N|O|1997-05-31|1997-04-22|1997-06-21|TAKE BACK RETURN|RAIL|platelets-- furiously sly excu|
+1568|9|2|2|46|41814.00|0.04|0.00|N|O|1997-04-06|1997-04-08|1997-04-23|TAKE BACK RETURN|MAIL|g the blithely even acco|
+1569|75|3|1|5|4875.35|0.07|0.00|N|O|1998-04-16|1998-06-21|1998-04-18|COLLECT COD|REG AIR| packages. ironic, even excuses a|
+1569|39|10|2|16|15024.48|0.01|0.08|N|O|1998-04-26|1998-06-16|1998-05-26|COLLECT COD|MAIL|deposits. blithely final asymptotes ac|
+1569|49|10|3|43|40808.72|0.10|0.03|N|O|1998-06-05|1998-05-31|1998-06-28|DELIVER IN PERSON|FOB| instructions.|
+1569|70|1|4|30|29102.10|0.02|0.03|N|O|1998-07-19|1998-06-04|1998-08-10|NONE|SHIP|packages. excuses lose evenly carefully reg|
+1570|183|4|1|25|27079.50|0.00|0.06|N|O|1998-05-03|1998-06-02|1998-06-02|DELIVER IN PERSON|REG AIR|its. slyly regular sentiments|
+1570|86|7|2|7|6902.56|0.05|0.05|N|O|1998-07-10|1998-06-01|1998-07-23|TAKE BACK RETURN|MAIL|requests boost quickly re|
+1571|52|3|1|47|44746.35|0.00|0.05|R|F|1992-12-07|1993-02-24|1993-01-01|TAKE BACK RETURN|REG AIR|ng to the fluffily unusual |
+1571|183|4|2|6|6499.08|0.03|0.00|A|F|1993-01-08|1993-02-13|1993-02-07|COLLECT COD|SHIP| special, ironic depo|
+1571|59|7|3|18|17262.90|0.05|0.08|A|F|1993-01-09|1993-01-12|1993-01-31|COLLECT COD|AIR| pending grouches |
+1571|101|4|4|48|48052.80|0.05|0.05|A|F|1992-12-28|1993-01-04|1993-01-04|DELIVER IN PERSON|RAIL|slyly pending p|
+1571|42|5|5|10|9420.40|0.03|0.06|R|F|1992-12-12|1993-02-13|1992-12-29|DELIVER IN PERSON|AIR|lets. carefully regular ideas wake|
+1571|34|10|6|24|22416.72|0.05|0.07|A|F|1993-03-22|1993-01-31|1993-04-09|NONE|TRUCK|warthogs wake carefully acro|
+1572|24|5|1|41|37884.82|0.02|0.00|N|O|1996-05-16|1996-04-09|1996-05-28|TAKE BACK RETURN|REG AIR|. pinto beans alongside|
+1572|93|7|2|10|9930.90|0.04|0.06|N|O|1996-05-17|1996-03-26|1996-05-19|NONE|AIR| accounts affix slyly. |
+1573|186|7|1|5|5430.90|0.05|0.01|A|F|1993-04-24|1993-03-13|1993-05-17|TAKE BACK RETURN|MAIL|ymptotes could u|
+1573|31|2|2|17|15827.51|0.00|0.06|R|F|1993-02-24|1993-02-16|1993-03-08|TAKE BACK RETURN|TRUCK|carefully regular deposits. |
+1573|83|4|3|16|15729.28|0.04|0.03|A|F|1993-03-15|1993-03-16|1993-03-31|COLLECT COD|AIR|ely. furiously final requests wake slyl|
+1573|194|7|4|11|12036.09|0.09|0.01|R|F|1993-03-23|1993-03-24|1993-04-12|TAKE BACK RETURN|RAIL|nently pending|
+1573|137|8|5|7|7259.91|0.00|0.01|R|F|1993-01-30|1993-03-14|1993-02-27|DELIVER IN PERSON|SHIP|eodolites sleep slyly. slyly f|
+1573|154|6|6|30|31624.50|0.03|0.01|A|F|1992-12-29|1993-03-06|1993-01-02|DELIVER IN PERSON|TRUCK|. blithely even theodolites boos|
+1574|48|7|1|41|38869.64|0.06|0.02|N|O|1997-03-08|1997-02-09|1997-04-01|COLLECT COD|AIR|s. slyly regular depen|
+1574|191|5|2|50|54559.50|0.00|0.05|N|O|1996-12-14|1997-02-14|1996-12-16|TAKE BACK RETURN|FOB|le regular, regular foxes. blithely e|
+1574|55|3|3|25|23876.25|0.06|0.02|N|O|1997-01-16|1997-02-14|1997-02-12|DELIVER IN PERSON|TRUCK|ly silent accounts.|
+1574|191|4|4|6|6547.14|0.03|0.05|N|O|1997-02-24|1997-02-03|1997-03-01|NONE|AIR|e silent, final packages. speci|
+1574|109|4|5|6|6054.60|0.05|0.05|N|O|1997-02-09|1997-03-02|1997-02-14|COLLECT COD|MAIL|nic, final ideas snooze. |
+1574|5|2|6|42|38010.00|0.07|0.01|N|O|1996-12-19|1997-01-13|1996-12-28|NONE|FOB|o beans according t|
+1574|136|7|7|14|14505.82|0.04|0.01|N|O|1996-12-30|1997-01-19|1997-01-20|NONE|AIR|ily bold a|
+1575|29|10|1|42|39018.84|0.05|0.08|N|O|1995-10-21|1995-11-25|1995-10-24|DELIVER IN PERSON|RAIL|ly pending pinto beans.|
+1575|36|7|2|39|36505.17|0.00|0.06|N|O|1995-10-30|1995-10-15|1995-11-10|COLLECT COD|TRUCK| ironic requests snooze ironic, regular acc|
+1575|2|5|3|12|10824.00|0.01|0.05|N|O|1995-12-27|1995-11-11|1996-01-23|TAKE BACK RETURN|AIR| bold accounts. furi|
+1575|111|1|4|39|39433.29|0.07|0.00|N|O|1995-09-23|1995-11-05|1995-09-25|TAKE BACK RETURN|TRUCK| after the unusual asym|
+1575|83|4|5|10|9830.80|0.09|0.00|N|O|1996-01-10|1995-11-20|1996-01-13|DELIVER IN PERSON|RAIL|k excuses. pinto beans wake a|
+1575|178|6|6|14|15094.38|0.08|0.06|N|O|1995-10-31|1995-12-06|1995-11-30|NONE|AIR|beans breach among the furiously specia|
+1575|117|1|7|48|48821.28|0.08|0.04|N|O|1995-11-19|1995-10-25|1995-12-07|DELIVER IN PERSON|TRUCK|cies. regu|
+1600|172|10|1|20|21443.40|0.02|0.01|R|F|1993-06-16|1993-04-23|1993-07-02|COLLECT COD|FOB|pths sleep blithely about the|
+1600|44|3|2|48|45313.92|0.07|0.02|R|F|1993-04-17|1993-04-14|1993-05-03|DELIVER IN PERSON|FOB|furiously silent foxes could wake. car|
+1600|39|10|3|8|7512.24|0.04|0.07|R|F|1993-03-07|1993-04-22|1993-03-26|TAKE BACK RETURN|FOB|cajole furiously fluf|
+1600|69|8|4|25|24226.50|0.00|0.06|A|F|1993-05-25|1993-04-07|1993-06-05|TAKE BACK RETURN|REG AIR|press packages. ironic excuses bo|
+1600|147|8|5|30|31414.20|0.03|0.08|R|F|1993-06-03|1993-05-03|1993-06-07|DELIVER IN PERSON|RAIL|al escapades alongside of the depo|
+1601|167|8|1|6|6402.96|0.00|0.00|A|F|1994-10-19|1994-09-28|1994-10-23|COLLECT COD|SHIP| bold sheaves. furiously per|
+1601|175|3|2|50|53758.50|0.03|0.02|R|F|1994-12-24|1994-10-23|1995-01-11|COLLECT COD|FOB|ideas doubt|
+1601|90|1|3|14|13861.26|0.04|0.08|R|F|1994-09-17|1994-11-22|1994-10-03|DELIVER IN PERSON|RAIL|he special, fin|
+1602|183|4|1|4|4332.72|0.08|0.06|R|F|1993-10-31|1993-09-05|1993-11-21|NONE|RAIL|y. even excuses|
+1603|39|5|1|1|939.03|0.08|0.00|R|F|1993-08-17|1993-09-04|1993-08-22|TAKE BACK RETURN|REG AIR|d accounts. special warthogs use fur|
+1603|66|5|2|29|28015.74|0.06|0.08|A|F|1993-09-28|1993-09-20|1993-10-28|NONE|SHIP|ses wake furiously. theodolite|
+1604|42|3|1|15|14130.60|0.09|0.08|R|F|1993-09-22|1993-09-03|1993-09-29|TAKE BACK RETURN|MAIL| instructions haggle|
+1604|141|4|2|37|38522.18|0.06|0.06|A|F|1993-08-22|1993-09-21|1993-09-10|COLLECT COD|SHIP|requests. blithely ironic somas s|
+1604|114|8|3|19|19268.09|0.09|0.07|A|F|1993-10-15|1993-10-04|1993-11-09|COLLECT COD|RAIL| ideas. bol|
+1604|175|4|4|15|16127.55|0.03|0.00|R|F|1993-09-10|1993-08-31|1993-09-30|TAKE BACK RETURN|RAIL|ending realms along the special, p|
+1604|21|4|5|23|21183.46|0.08|0.05|A|F|1993-10-11|1993-08-30|1993-10-18|DELIVER IN PERSON|RAIL|en requests. blithely fin|
+1605|142|1|1|47|48980.58|0.00|0.01|N|O|1998-04-29|1998-06-12|1998-05-20|DELIVER IN PERSON|AIR|. carefully r|
+1605|180|8|2|18|19443.24|0.10|0.00|N|O|1998-05-13|1998-06-17|1998-06-03|COLLECT COD|REG AIR|ly regular foxes wake carefully. bol|
+1605|59|10|3|39|37402.95|0.02|0.03|N|O|1998-07-12|1998-06-05|1998-08-09|DELIVER IN PERSON|MAIL|nal dependencies-- quickly final frets acc|
+1605|183|4|4|25|27079.50|0.06|0.02|N|O|1998-05-26|1998-06-14|1998-06-05|COLLECT COD|AIR|ole carefully car|
+1606|115|6|1|21|21317.31|0.04|0.00|N|O|1997-06-02|1997-07-02|1997-06-27|DELIVER IN PERSON|RAIL| pending theodolites prom|
+1606|174|3|2|35|37595.95|0.00|0.02|N|O|1997-06-20|1997-06-19|1997-06-22|COLLECT COD|TRUCK|carefully sil|
+1606|100|4|3|23|23002.30|0.00|0.06|N|O|1997-04-19|1997-06-26|1997-04-30|NONE|MAIL|ously final requests. slowly ironic ex|
+1606|97|9|4|20|19941.80|0.02|0.04|N|O|1997-05-01|1997-05-26|1997-05-28|TAKE BACK RETURN|TRUCK|fily carefu|
+1606|71|10|5|14|13594.98|0.10|0.01|N|O|1997-05-19|1997-07-05|1997-06-10|COLLECT COD|FOB|structions haggle f|
+1607|190|1|1|2|2180.38|0.02|0.00|N|O|1996-01-11|1996-02-15|1996-01-19|DELIVER IN PERSON|MAIL|packages haggle. regular requests boost s|
+1607|119|3|2|37|37707.07|0.05|0.02|N|O|1996-02-27|1996-02-18|1996-03-16|NONE|AIR|alongside |
+1607|123|4|3|39|39901.68|0.00|0.00|N|O|1996-02-01|1996-02-12|1996-02-16|NONE|FOB|uches cajole. accounts ar|
+1607|76|6|4|34|33186.38|0.05|0.06|N|O|1996-01-06|1996-02-24|1996-01-10|DELIVER IN PERSON|SHIP| quickly above the |
+1607|178|8|5|48|51752.16|0.00|0.05|N|O|1996-02-22|1996-02-13|1996-03-09|TAKE BACK RETURN|MAIL|ular forges. deposits a|
+1632|191|5|1|47|51285.93|0.08|0.00|N|O|1997-01-25|1997-02-09|1997-02-19|TAKE BACK RETURN|RAIL|g to the closely special no|
+1632|148|7|2|14|14673.96|0.08|0.05|N|O|1997-01-15|1997-02-25|1997-01-28|NONE|RAIL|oxes. deposits nag slyly along the slyly |
+1632|177|6|3|47|50626.99|0.03|0.04|N|O|1997-01-29|1997-03-03|1997-02-21|NONE|MAIL|sts. blithely regular |
+1632|57|9|4|33|31582.65|0.09|0.02|N|O|1997-04-01|1997-02-24|1997-04-29|TAKE BACK RETURN|REG AIR|ructions! slyly|
+1632|142|1|5|43|44812.02|0.10|0.03|N|O|1997-02-24|1997-02-19|1997-03-25|DELIVER IN PERSON|FOB|ts. blithe, bold ideas cajo|
+1633|178|7|1|35|37735.95|0.01|0.02|N|O|1996-01-09|1995-12-02|1996-01-21|COLLECT COD|REG AIR|ly against the dolph|
+1633|5|6|2|15|13575.00|0.00|0.05|N|O|1995-12-13|1995-11-13|1996-01-04|TAKE BACK RETURN|FOB|ges wake fluffil|
+1634|48|9|1|21|19908.84|0.00|0.00|N|O|1996-10-04|1996-10-22|1996-11-01|NONE|MAIL|counts alo|
+1634|172|3|2|44|47175.48|0.05|0.01|N|O|1996-09-17|1996-11-09|1996-10-03|COLLECT COD|SHIP|requests affix slyly. quickly even pack|
+1634|19|10|3|21|19299.21|0.06|0.07|N|O|1996-11-16|1996-10-21|1996-11-27|NONE|TRUCK|y along the excuses.|
+1634|68|3|4|17|16457.02|0.08|0.07|N|O|1996-10-29|1996-10-15|1996-11-02|TAKE BACK RETURN|SHIP|cial, bold platelets alongside of the f|
+1634|76|7|5|2|1952.14|0.07|0.04|N|O|1996-11-22|1996-10-28|1996-12-17|NONE|SHIP|ly. carefully regular asymptotes wake|
+1634|170|9|6|11|11771.87|0.01|0.08|N|O|1996-10-04|1996-12-06|1996-10-14|DELIVER IN PERSON|SHIP|final requests |
+1634|13|7|7|35|31955.35|0.06|0.02|N|O|1996-11-25|1996-11-25|1996-12-12|TAKE BACK RETURN|RAIL|cies. regular, special de|
+1635|71|1|1|3|2913.21|0.06|0.08|N|O|1997-03-13|1997-03-25|1997-03-27|COLLECT COD|FOB| quickly ironic r|
+1635|90|1|2|8|7920.72|0.04|0.05|N|O|1997-04-30|1997-04-21|1997-05-09|DELIVER IN PERSON|AIR|ravely carefully express |
+1635|114|5|3|20|20282.20|0.07|0.01|N|O|1997-05-19|1997-04-01|1997-06-17|TAKE BACK RETURN|FOB|oost according to the carefully even accou|
+1635|77|5|4|40|39082.80|0.01|0.04|N|O|1997-02-25|1997-03-20|1997-03-12|TAKE BACK RETURN|RAIL|uriously up the ironic deposits. slyly i|
+1636|85|6|1|2|1970.16|0.09|0.03|N|O|1997-09-26|1997-08-22|1997-10-05|NONE|TRUCK|nal foxes cajole above the blithely reg|
+1636|169|10|2|45|48112.20|0.03|0.01|N|O|1997-07-14|1997-08-08|1997-07-27|COLLECT COD|RAIL|ely express reque|
+1636|108|1|3|24|24194.40|0.07|0.08|N|O|1997-10-07|1997-08-12|1997-11-04|TAKE BACK RETURN|MAIL|e carefully unusual ideas are f|
+1636|153|1|4|43|45285.45|0.06|0.00|N|O|1997-08-23|1997-08-10|1997-09-17|NONE|REG AIR|blithely special r|
+1636|19|6|5|22|20218.22|0.05|0.02|N|O|1997-07-22|1997-08-18|1997-08-03|COLLECT COD|AIR|ular, regu|
+1636|63|2|6|34|32744.04|0.10|0.01|N|O|1997-08-11|1997-09-09|1997-08-23|NONE|TRUCK|ular depos|
+1636|114|1|7|7|7098.77|0.04|0.00|N|O|1997-07-28|1997-09-10|1997-07-31|NONE|MAIL|ronic instructions. final|
+1637|86|7|1|49|48317.92|0.02|0.03|N|F|1995-06-08|1995-04-19|1995-07-01|COLLECT COD|REG AIR|. blithely i|
+1637|73|2|2|1|973.07|0.10|0.02|A|F|1995-02-14|1995-03-26|1995-03-09|TAKE BACK RETURN|AIR|ly final pinto beans. furiously|
+1637|22|1|3|10|9220.20|0.02|0.05|R|F|1995-02-21|1995-03-17|1995-03-11|NONE|AIR|uriously? blithely even sauternes wake. |
+1637|93|5|4|42|41709.78|0.06|0.01|A|F|1995-03-18|1995-04-24|1995-03-31|COLLECT COD|SHIP|blithely a|
+1637|5|8|5|25|22625.00|0.05|0.00|R|F|1995-06-07|1995-03-26|1995-06-08|COLLECT COD|RAIL| haggle carefully silent accou|
+1637|109|4|6|38|38345.80|0.02|0.08|R|F|1995-03-20|1995-05-05|1995-04-14|DELIVER IN PERSON|SHIP|even, pending foxes nod regular|
+1637|52|10|7|21|19993.05|0.07|0.08|A|F|1995-04-30|1995-04-30|1995-05-05|COLLECT COD|SHIP|ly ironic theodolites use b|
+1638|6|7|1|46|41676.00|0.03|0.02|N|O|1997-10-16|1997-10-28|1997-11-09|COLLECT COD|MAIL|otes haggle before the slyly bold instructi|
+1638|149|10|2|30|31474.20|0.00|0.04|N|O|1997-12-05|1997-09-17|1997-12-06|NONE|REG AIR|s cajole boldly bold requests. closely |
+1638|31|7|3|5|4655.15|0.08|0.07|N|O|1997-10-15|1997-11-01|1997-11-08|DELIVER IN PERSON|FOB|xcuses sleep furiou|
+1638|56|8|4|19|18164.95|0.00|0.08|N|O|1997-10-15|1997-10-27|1997-11-03|DELIVER IN PERSON|MAIL| quickly expres|
+1638|143|6|5|25|26078.50|0.05|0.03|N|O|1997-10-06|1997-09-30|1997-11-02|DELIVER IN PERSON|REG AIR|gle final, ironic pinto beans. |
+1638|155|10|6|46|48536.90|0.07|0.08|N|O|1997-08-20|1997-10-10|1997-09-09|COLLECT COD|AIR|ckages are carefully even instru|
+1639|187|8|1|24|26092.32|0.07|0.00|N|O|1995-08-24|1995-10-06|1995-08-31|COLLECT COD|REG AIR| the regular packages. courts dou|
+1639|43|6|2|38|35835.52|0.01|0.04|N|O|1995-08-23|1995-11-09|1995-08-29|TAKE BACK RETURN|FOB|y regular packages. b|
+1639|171|10|3|41|43917.97|0.04|0.02|N|O|1995-12-19|1995-11-11|1996-01-12|DELIVER IN PERSON|FOB|structions w|
+1664|118|5|1|48|48869.28|0.04|0.02|N|O|1996-06-21|1996-05-01|1996-07-19|TAKE BACK RETURN|RAIL| use. ironic deposits integrate. slyly unu|
+1664|173|2|2|30|32195.10|0.06|0.05|N|O|1996-04-04|1996-05-04|1996-05-03|COLLECT COD|FOB|ess multip|
+1664|151|2|3|10|10511.50|0.00|0.06|N|O|1996-04-10|1996-05-13|1996-05-07|TAKE BACK RETURN|RAIL|instructions up the acc|
+1664|155|3|4|35|36930.25|0.00|0.04|N|O|1996-03-06|1996-05-16|1996-03-09|DELIVER IN PERSON|REG AIR|y regular ide|
+1664|57|8|5|9|8613.45|0.07|0.04|N|O|1996-04-15|1996-05-14|1996-05-11|DELIVER IN PERSON|TRUCK|ges. fluffil|
+1664|141|8|6|40|41645.60|0.09|0.07|N|O|1996-04-02|1996-04-22|1996-04-17|COLLECT COD|REG AIR|se blithely unusual pains. carefully|
+1665|47|6|1|4|3788.16|0.02|0.03|A|F|1994-09-01|1994-06-07|1994-09-12|DELIVER IN PERSON|TRUCK|ely final requests. requests|
+1665|78|6|2|1|978.07|0.03|0.05|R|F|1994-05-22|1994-07-06|1994-05-24|TAKE BACK RETURN|TRUCK|sly final p|
+1666|185|6|1|30|32555.40|0.04|0.03|N|O|1995-10-28|1995-11-30|1995-11-18|TAKE BACK RETURN|AIR| breach evenly final accounts. r|
+1666|64|1|2|20|19281.20|0.01|0.00|N|O|1996-01-27|1995-12-12|1996-01-31|NONE|REG AIR|uietly regular foxes wake quick|
+1666|134|10|3|31|32058.03|0.05|0.07|N|O|1996-02-11|1996-01-11|1996-02-28|COLLECT COD|RAIL|ding to the express, bold accounts. fu|
+1666|169|8|4|41|43835.56|0.06|0.08|N|O|1995-11-29|1996-01-04|1995-12-24|NONE|TRUCK|ly regular excuses; regular ac|
+1667|21|4|1|6|5526.12|0.04|0.02|N|O|1997-12-07|1997-11-16|1998-01-02|COLLECT COD|FOB|riously busy requests. blithely final a|
+1667|22|1|2|29|26738.58|0.06|0.07|N|O|1997-10-15|1997-11-09|1997-11-11|TAKE BACK RETURN|MAIL|l accounts. furiously final courts h|
+1667|95|8|3|48|47764.32|0.05|0.01|N|O|1998-01-27|1998-01-06|1998-02-09|TAKE BACK RETURN|SHIP|tes sleep furiously. carefully eve|
+1667|59|1|4|24|23017.20|0.04|0.01|N|O|1997-10-14|1997-12-01|1997-11-09|TAKE BACK RETURN|MAIL|hrash final requests. care|
+1667|195|9|5|2|2190.38|0.07|0.00|N|O|1997-12-17|1997-11-22|1998-01-16|NONE|SHIP|pecial requests hag|
+1667|48|7|6|6|5688.24|0.01|0.03|N|O|1998-01-21|1997-12-19|1998-01-28|NONE|TRUCK| nag quickly above th|
+1667|40|6|7|19|17860.76|0.09|0.03|N|O|1998-01-23|1997-11-24|1998-01-26|DELIVER IN PERSON|SHIP|around the pinto beans. express, special|
+1668|132|8|1|8|8257.04|0.06|0.01|N|O|1997-07-23|1997-10-09|1997-08-06|DELIVER IN PERSON|FOB|arefully regular tithes! slyl|
+1668|1|8|2|25|22525.00|0.01|0.06|N|O|1997-08-08|1997-09-28|1997-09-01|NONE|TRUCK|y ironic requests. bold, final ideas a|
+1668|75|5|3|42|40952.94|0.08|0.01|N|O|1997-08-09|1997-09-08|1997-08-31|NONE|FOB|ole carefully excuses. final|
+1668|191|5|4|9|9820.71|0.05|0.03|N|O|1997-10-17|1997-09-05|1997-11-01|COLLECT COD|RAIL|wake furiously even instructions. sil|
+1668|128|9|5|25|25703.00|0.01|0.02|N|O|1997-10-08|1997-09-20|1997-10-11|DELIVER IN PERSON|REG AIR|even platelets across the silent |
+1668|10|3|6|38|34580.38|0.07|0.01|N|O|1997-08-26|1997-09-17|1997-09-05|DELIVER IN PERSON|TRUCK|ep slyly across the furi|
+1669|79|10|1|24|23497.68|0.04|0.08|N|O|1997-09-04|1997-07-30|1997-09-20|DELIVER IN PERSON|RAIL| regular, final deposits use quick|
+1670|32|3|1|41|38213.23|0.07|0.01|N|O|1997-07-19|1997-08-20|1997-07-23|DELIVER IN PERSON|TRUCK|thely according to the sly|
+1670|122|3|2|10|10221.20|0.07|0.03|N|O|1997-09-14|1997-08-16|1997-09-23|NONE|SHIP|fily special ideas |
+1670|186|7|3|41|44533.38|0.07|0.07|N|O|1997-07-19|1997-08-05|1997-07-26|COLLECT COD|SHIP|al gifts. speci|
+1671|149|2|1|21|22031.94|0.02|0.07|N|O|1996-07-28|1996-09-28|1996-08-08|TAKE BACK RETURN|AIR|s accounts slee|
+1671|96|10|2|4|3984.36|0.05|0.00|N|O|1996-08-30|1996-09-19|1996-09-23|DELIVER IN PERSON|TRUCK|lyly regular ac|
+1671|124|3|3|11|11265.32|0.06|0.08|N|O|1996-09-16|1996-10-21|1996-09-18|NONE|SHIP|tes sleep blithely|
+1671|178|7|4|5|5390.85|0.00|0.00|N|O|1996-11-14|1996-10-20|1996-11-25|TAKE BACK RETURN|FOB|luffily regular deposits|
+1671|127|8|5|12|12325.44|0.07|0.04|N|O|1996-11-17|1996-09-02|1996-12-17|COLLECT COD|RAIL|special, ironic|
+1671|197|9|6|46|50470.74|0.08|0.05|N|O|1996-09-13|1996-10-14|1996-09-28|TAKE BACK RETURN|REG AIR|. slyly bold instructions boost. furiousl|
+1696|16|3|1|8|7328.08|0.04|0.02|N|O|1998-04-28|1998-02-07|1998-05-10|NONE|TRUCK|the blithely|
+1696|139|5|2|13|13508.69|0.08|0.06|N|O|1998-03-01|1998-03-25|1998-03-24|TAKE BACK RETURN|TRUCK|tructions play slyly q|
+1696|2|5|3|19|17138.00|0.08|0.05|N|O|1998-05-03|1998-03-13|1998-05-28|TAKE BACK RETURN|REG AIR|its maintain alongside of the f|
+1696|193|4|4|21|22956.99|0.05|0.00|N|O|1998-05-04|1998-02-18|1998-05-07|NONE|MAIL|y players sleep along the final, pending |
+1696|94|7|5|43|42745.87|0.03|0.06|N|O|1998-02-14|1998-03-29|1998-02-20|COLLECT COD|FOB|arefully regular dep|
+1697|75|5|1|6|5850.42|0.05|0.00|N|O|1997-01-28|1996-11-27|1997-01-31|NONE|FOB|accounts breach slyly even de|
+1697|104|7|2|24|24098.40|0.00|0.08|N|O|1996-12-29|1996-12-19|1997-01-10|NONE|SHIP|ts cajole carefully above the carefully|
+1697|124|9|3|27|27651.24|0.06|0.00|N|O|1997-01-20|1996-12-02|1997-02-05|COLLECT COD|MAIL|ly regular packages across the silent, b|
+1697|94|5|4|49|48710.41|0.08|0.04|N|O|1996-12-07|1997-01-02|1996-12-31|COLLECT COD|TRUCK|lar foxes. fluffily furious ideas doubt qu|
+1697|35|1|5|19|17765.57|0.03|0.07|N|O|1997-01-08|1996-11-12|1997-01-11|DELIVER IN PERSON|FOB|ons? special, special accounts after|
+1698|97|8|1|44|43871.96|0.05|0.05|N|O|1997-05-16|1997-07-05|1997-05-27|NONE|RAIL|ts wake slyly after t|
+1698|93|5|2|6|5958.54|0.08|0.00|N|O|1997-08-21|1997-06-08|1997-09-03|DELIVER IN PERSON|RAIL| pending packages affix ne|
+1698|21|6|3|22|20262.44|0.03|0.04|N|O|1997-08-07|1997-05-28|1997-08-24|DELIVER IN PERSON|TRUCK|oward the furiously iro|
+1698|112|6|4|19|19230.09|0.00|0.07|N|O|1997-07-04|1997-06-21|1997-08-01|NONE|RAIL| fluffily e|
+1698|53|4|5|37|35262.85|0.00|0.03|N|O|1997-05-16|1997-05-29|1997-05-27|NONE|AIR|ly regular ideas. deposit|
+1698|166|7|6|15|15992.40|0.10|0.01|N|O|1997-07-20|1997-06-07|1997-07-21|TAKE BACK RETURN|RAIL|final ideas. even, ironic |
+1699|38|9|1|50|46901.50|0.00|0.06|A|F|1994-03-26|1994-03-23|1994-04-20|NONE|FOB|to the final requests are carefully silent |
+1699|135|6|2|17|17597.21|0.07|0.02|R|F|1994-01-12|1994-03-12|1994-02-08|NONE|AIR|haggle blithely slyly|
+1700|140|1|1|38|39525.32|0.04|0.04|N|O|1996-10-03|1996-07-27|1996-10-22|NONE|RAIL|ular dependencies engage slyly |
+1700|156|7|2|49|51751.35|0.04|0.00|N|O|1996-09-26|1996-07-28|1996-10-16|NONE|TRUCK|kly even dependencies haggle fluffi|
+1701|150|9|1|47|49357.05|0.08|0.05|R|F|1992-05-25|1992-06-29|1992-06-15|NONE|RAIL|slyly final requests cajole requests. f|
+1701|54|5|2|2|1908.10|0.01|0.04|R|F|1992-06-24|1992-07-12|1992-06-29|COLLECT COD|SHIP|ween the pending, final accounts. |
+1701|35|1|3|26|24310.78|0.10|0.06|R|F|1992-06-04|1992-07-11|1992-07-04|DELIVER IN PERSON|FOB| accounts. blithely pending pinto be|
+1702|67|2|1|19|18374.14|0.02|0.01|N|F|1995-06-02|1995-06-30|1995-06-29|NONE|REG AIR|ies haggle blith|
+1702|30|5|2|38|35341.14|0.00|0.00|N|O|1995-09-01|1995-06-10|1995-09-10|DELIVER IN PERSON|REG AIR|as believe blithely. bo|
+1702|195|6|3|46|50378.74|0.00|0.08|N|O|1995-07-14|1995-06-30|1995-07-20|NONE|FOB|y even foxes. carefully final dependencies |
+1702|93|4|4|28|27806.52|0.07|0.05|R|F|1995-06-10|1995-07-26|1995-06-16|TAKE BACK RETURN|AIR|nts haggle along the packa|
+1702|89|10|5|34|33628.72|0.01|0.06|N|O|1995-07-04|1995-06-08|1995-07-28|DELIVER IN PERSON|AIR|y careful packages; dogged acco|
+1702|42|9|6|28|26377.12|0.10|0.00|N|O|1995-08-14|1995-07-31|1995-09-08|COLLECT COD|RAIL|ackages sleep. furiously even excuses snooz|
+1703|166|5|1|36|38381.76|0.09|0.01|R|F|1993-04-22|1993-03-05|1993-04-24|DELIVER IN PERSON|SHIP|riously express |
+1703|137|8|2|35|36299.55|0.01|0.08|R|F|1993-04-14|1993-03-31|1993-04-27|NONE|RAIL|he carefully|
+1703|124|5|3|48|49157.76|0.06|0.02|R|F|1993-02-07|1993-04-20|1993-02-24|TAKE BACK RETURN|AIR|ggle slyly furiously regular theodol|
+1728|126|5|1|1|1026.12|0.07|0.04|N|O|1996-09-16|1996-08-19|1996-09-18|COLLECT COD|FOB|lly. carefully ex|
+1728|105|8|2|23|23117.30|0.05|0.02|N|O|1996-09-08|1996-07-24|1996-09-20|NONE|FOB|ns. pending, final ac|
+1728|165|10|3|44|46867.04|0.08|0.07|N|O|1996-07-31|1996-06-22|1996-08-06|COLLECT COD|FOB|ide of the slyly blithe|
+1728|27|8|4|34|31518.68|0.08|0.05|N|O|1996-08-28|1996-07-20|1996-09-12|DELIVER IN PERSON|MAIL|special req|
+1728|199|2|5|31|34074.89|0.09|0.02|N|O|1996-07-26|1996-06-28|1996-08-14|NONE|REG AIR|kly sly theodolites.|
+1729|157|8|1|12|12685.80|0.08|0.04|A|F|1992-08-11|1992-07-24|1992-08-16|COLLECT COD|RAIL|y pending packages detect. carefully re|
+1730|166|5|1|41|43712.56|0.01|0.03|N|O|1998-08-11|1998-08-29|1998-09-02|TAKE BACK RETURN|TRUCK| instructions. unusual, even Tiresi|
+1730|162|3|2|15|15932.40|0.07|0.04|N|O|1998-09-07|1998-09-12|1998-09-30|TAKE BACK RETURN|AIR|pinto beans cajole. bravely bold|
+1730|162|1|3|9|9559.44|0.10|0.00|N|O|1998-09-18|1998-09-15|1998-09-21|DELIVER IN PERSON|FOB|gular dependencies wake. blithely final e|
+1730|10|7|4|40|36400.40|0.02|0.03|N|O|1998-10-02|1998-10-06|1998-10-03|NONE|SHIP|ven dinos slee|
+1730|141|4|5|43|44769.02|0.04|0.06|N|O|1998-10-26|1998-10-22|1998-11-02|DELIVER IN PERSON|TRUCK|ng deposits cajo|
+1731|184|5|1|36|39030.48|0.10|0.00|N|O|1996-04-18|1996-04-03|1996-04-29|TAKE BACK RETURN|MAIL|ngside of the even instruct|
+1731|139|10|2|7|7273.91|0.04|0.07|N|O|1996-04-11|1996-02-13|1996-04-30|DELIVER IN PERSON|REG AIR|fily quick asymptotes|
+1731|51|9|3|50|47552.50|0.05|0.04|N|O|1996-01-14|1996-03-13|1996-01-29|COLLECT COD|RAIL|ly slyly speci|
+1731|196|10|4|23|25212.37|0.10|0.04|N|O|1996-04-22|1996-02-25|1996-05-16|TAKE BACK RETURN|RAIL|rays? bold, express pac|
+1731|53|4|5|37|35262.85|0.10|0.05|N|O|1996-04-30|1996-03-17|1996-05-27|TAKE BACK RETURN|RAIL| beans use furiously slyly b|
+1731|124|7|6|41|41988.92|0.03|0.08|N|O|1996-04-05|1996-02-28|1996-05-01|TAKE BACK RETURN|RAIL|haggle across the blithely ironi|
+1732|5|6|1|50|45250.00|0.02|0.01|R|F|1993-12-05|1994-01-23|1993-12-20|TAKE BACK RETURN|FOB|fily final asymptotes according |
+1732|99|10|2|36|35967.24|0.01|0.03|A|F|1994-03-15|1994-02-09|1994-04-02|DELIVER IN PERSON|TRUCK|ve the accounts. slowly ironic multip|
+1732|161|8|3|41|43507.56|0.00|0.04|R|F|1994-02-20|1994-01-07|1994-02-27|TAKE BACK RETURN|AIR|quests sublate against the silent |
+1732|152|3|4|9|9469.35|0.04|0.04|A|F|1994-02-25|1994-01-29|1994-03-16|TAKE BACK RETURN|FOB|ular platelets. deposits wak|
+1732|169|8|5|25|26729.00|0.02|0.05|A|F|1994-02-15|1994-01-07|1994-02-21|COLLECT COD|REG AIR|nag slyly. even, special de|
+1732|73|1|6|16|15569.12|0.01|0.05|R|F|1994-01-07|1994-01-02|1994-01-25|COLLECT COD|SHIP|ix carefully at the furiously regular pac|
+1733|111|5|1|41|41455.51|0.08|0.01|N|O|1996-06-13|1996-07-08|1996-07-07|TAKE BACK RETURN|AIR|ess notornis. fur|
+1733|24|7|2|16|14784.32|0.00|0.04|N|O|1996-08-28|1996-07-25|1996-09-27|COLLECT COD|MAIL|slyly express deposits sleep abo|
+1733|120|10|3|29|29583.48|0.10|0.06|N|O|1996-07-16|1996-08-08|1996-07-28|NONE|TRUCK|ns detect among the special accounts. qu|
+1733|136|7|4|38|39372.94|0.01|0.03|N|O|1996-08-26|1996-07-23|1996-08-28|NONE|FOB| deposits |
+1733|34|5|5|22|20548.66|0.06|0.07|N|O|1996-07-16|1996-07-24|1996-07-30|COLLECT COD|AIR|gainst the final deposits. carefully final |
+1733|66|7|6|9|8694.54|0.06|0.08|N|O|1996-05-25|1996-07-23|1996-06-10|COLLECT COD|TRUCK|ven foxes was according to t|
+1733|146|9|7|13|13599.82|0.02|0.03|N|O|1996-08-03|1996-08-02|1996-08-18|NONE|MAIL|olites sleep furious|
+1734|155|3|1|38|40095.70|0.03|0.03|R|F|1994-08-09|1994-09-07|1994-08-12|COLLECT COD|FOB|ts doubt b|
+1734|118|2|2|4|4072.44|0.06|0.03|A|F|1994-08-20|1994-07-17|1994-08-25|DELIVER IN PERSON|AIR|final warhorses.|
+1735|156|7|1|43|45414.45|0.02|0.06|A|F|1993-01-14|1993-03-25|1993-02-02|DELIVER IN PERSON|FOB|iously after the |
+1735|139|5|2|49|50917.37|0.03|0.04|A|F|1992-12-31|1993-02-03|1993-01-25|TAKE BACK RETURN|TRUCK|y express accounts above the exp|
+1760|96|9|1|38|37851.42|0.09|0.03|N|O|1996-06-15|1996-06-29|1996-07-11|NONE|MAIL|tions. blithely regular orbits against the |
+1760|8|9|2|3|2724.00|0.00|0.06|N|O|1996-07-18|1996-07-01|1996-08-01|NONE|RAIL|lyly bold dolphins haggle carefully. sl|
+1760|137|8|3|44|45633.72|0.05|0.01|N|O|1996-06-11|1996-06-16|1996-07-02|COLLECT COD|REG AIR|instructions poach slyly ironic theodolites|
+1761|52|4|1|33|31417.65|0.09|0.03|R|F|1994-01-03|1994-01-23|1994-01-31|NONE|FOB|s. excuses a|
+1761|52|3|2|37|35225.85|0.02|0.07|R|F|1994-02-17|1994-03-08|1994-03-16|NONE|RAIL| integrate. quickly unusual|
+1761|49|6|3|37|35114.48|0.06|0.04|R|F|1994-01-02|1994-03-12|1994-01-25|DELIVER IN PERSON|TRUCK|regular packages wake after|
+1761|73|1|4|49|47680.43|0.06|0.07|R|F|1994-01-08|1994-03-03|1994-02-05|TAKE BACK RETURN|FOB|y even packages promise|
+1761|157|5|5|37|39114.55|0.03|0.04|R|F|1994-04-24|1994-03-14|1994-04-29|TAKE BACK RETURN|MAIL|express requests print blithely around the|
+1761|24|7|6|12|11088.24|0.01|0.05|A|F|1994-04-16|1994-03-08|1994-04-21|DELIVER IN PERSON|AIR| sleep furiously. deposits are acco|
+1761|1|6|7|13|11713.00|0.03|0.08|R|F|1994-03-06|1994-03-18|1994-03-22|DELIVER IN PERSON|TRUCK|ons boost fu|
+1762|26|5|1|15|13890.30|0.04|0.08|A|F|1994-12-18|1994-10-29|1995-01-17|TAKE BACK RETURN|REG AIR|old packages thrash. care|
+1762|50|3|2|39|37051.95|0.10|0.02|A|F|1994-09-12|1994-11-09|1994-10-08|DELIVER IN PERSON|MAIL| ironic platelets sleep along t|
+1762|32|8|3|7|6524.21|0.05|0.01|R|F|1994-09-03|1994-10-02|1994-09-10|NONE|REG AIR|uickly express packages wake slyly-- regul|
+1762|145|2|4|24|25083.36|0.03|0.03|A|F|1994-11-30|1994-11-02|1994-12-20|NONE|REG AIR|accounts solve alongside of the fluffily |
+1762|8|9|5|49|44492.00|0.08|0.05|A|F|1994-10-20|1994-11-02|1994-11-10|TAKE BACK RETURN|SHIP| packages sleep fluffily pen|
+1762|94|7|6|35|34793.15|0.05|0.05|A|F|1994-11-25|1994-10-21|1994-11-28|COLLECT COD|AIR|ind quickly. accounts ca|
+1762|73|3|7|47|45734.29|0.03|0.01|A|F|1994-11-02|1994-10-07|1994-11-08|NONE|SHIP| blithely brave|
+1763|12|9|1|22|20064.22|0.09|0.06|N|O|1997-01-17|1997-01-15|1997-02-03|TAKE BACK RETURN|SHIP|ld. fluffily final ideas boos|
+1763|157|5|2|43|45457.45|0.04|0.04|N|O|1996-11-04|1996-12-09|1996-11-28|DELIVER IN PERSON|FOB|r deposits integrate blithely pending, quic|
+1763|25|10|3|16|14800.32|0.06|0.02|N|O|1996-12-12|1996-12-04|1996-12-25|DELIVER IN PERSON|RAIL|ously pending asymptotes a|
+1763|61|6|4|44|42286.64|0.04|0.05|N|O|1996-12-04|1997-01-06|1996-12-25|DELIVER IN PERSON|REG AIR| instructions need to integrate deposits. |
+1763|147|4|5|13|13612.82|0.03|0.05|N|O|1996-11-23|1997-01-24|1996-12-05|TAKE BACK RETURN|SHIP|s sleep carefully. fluffily unusua|
+1763|143|4|6|3|3129.42|0.05|0.03|N|O|1996-12-10|1996-12-06|1997-01-04|TAKE BACK RETURN|FOB|ut the slyly pending deposi|
+1763|184|5|7|2|2168.36|0.05|0.07|N|O|1997-02-27|1996-12-04|1997-03-27|COLLECT COD|FOB|even pinto beans snooze fluffi|
+1764|121|2|1|20|20422.40|0.09|0.02|A|F|1992-06-09|1992-05-22|1992-07-06|COLLECT COD|MAIL|y quickly regular packages. car|
+1764|67|4|2|3|2901.18|0.07|0.07|R|F|1992-05-13|1992-06-07|1992-05-26|COLLECT COD|RAIL|es wake slowly. |
+1764|78|6|3|27|26407.89|0.07|0.04|A|F|1992-05-06|1992-05-11|1992-05-23|COLLECT COD|TRUCK|ly final foxes wake blithely even requests|
+1765|161|2|1|36|38201.76|0.08|0.04|N|O|1996-03-02|1996-02-17|1996-03-14|DELIVER IN PERSON|SHIP|he blithely pending accou|
+1766|87|8|1|32|31586.56|0.08|0.01|N|O|1997-01-08|1996-11-11|1997-01-31|TAKE BACK RETURN|AIR|ess accounts. stealthily ironic accou|
+1766|34|10|2|12|11208.36|0.05|0.01|N|O|1996-10-28|1996-12-18|1996-11-15|DELIVER IN PERSON|AIR|heodolites above the final, regular acc|
+1766|111|1|3|1|1011.11|0.10|0.02|N|O|1997-01-21|1997-01-07|1997-02-19|NONE|TRUCK|ly blithely pending accounts. reg|
+1767|25|4|1|32|29600.64|0.08|0.04|A|F|1995-05-22|1995-05-14|1995-05-23|COLLECT COD|SHIP|to the bravely ironic requests i|
+1767|42|1|2|1|942.04|0.09|0.05|N|O|1995-06-23|1995-05-25|1995-07-03|TAKE BACK RETURN|RAIL|ing to the slyly fin|
+1767|174|5|3|24|25780.08|0.06|0.03|R|F|1995-03-16|1995-04-29|1995-04-11|DELIVER IN PERSON|RAIL|luffy theodolites need to detect furi|
+1767|23|8|4|50|46151.00|0.01|0.02|R|F|1995-05-29|1995-04-14|1995-06-15|NONE|REG AIR|y unusual foxe|
+1767|52|10|5|40|38082.00|0.06|0.00|R|F|1995-04-16|1995-05-06|1995-04-21|TAKE BACK RETURN|AIR|ep. accounts nag blithely fu|
+1792|88|9|1|9|8892.72|0.09|0.04|R|F|1994-02-28|1993-12-11|1994-03-12|TAKE BACK RETURN|AIR|final packages s|
+1792|9|6|2|5|4545.00|0.04|0.02|R|F|1994-02-13|1994-01-03|1994-02-28|DELIVER IN PERSON|TRUCK|ely regular accounts are slyly. pending, bo|
+1792|9|2|3|8|7272.00|0.01|0.04|A|F|1994-02-21|1994-01-26|1994-02-27|DELIVER IN PERSON|RAIL|nts. fluffily special instructions integr|
+1792|191|3|4|45|49103.55|0.00|0.01|A|F|1994-02-27|1993-12-24|1994-03-07|DELIVER IN PERSON|MAIL|ests are. ironic, regular asy|
+1792|199|2|5|35|38471.65|0.06|0.05|R|F|1994-01-31|1994-01-20|1994-02-17|NONE|FOB|e against the quic|
+1793|48|5|1|29|27493.16|0.01|0.06|R|F|1992-10-24|1992-09-20|1992-11-23|NONE|MAIL|ar excuses. |
+1793|126|9|2|4|4104.48|0.07|0.05|A|F|1992-07-28|1992-08-26|1992-08-21|COLLECT COD|RAIL|nic foxes along the even|
+1793|131|7|3|6|6186.78|0.01|0.05|R|F|1992-09-21|1992-09-05|1992-10-01|DELIVER IN PERSON|REG AIR|uctions; depo|
+1793|118|8|4|4|4072.44|0.00|0.08|R|F|1992-09-27|1992-09-21|1992-10-07|DELIVER IN PERSON|AIR|equests nod ac|
+1793|25|6|5|42|38850.84|0.03|0.03|A|F|1992-10-13|1992-10-02|1992-11-06|NONE|RAIL|uctions sleep carefully special, fl|
+1794|168|9|1|36|38453.76|0.09|0.08|N|O|1997-11-07|1997-11-01|1997-11-18|TAKE BACK RETURN|FOB|ely fluffily ironi|
+1794|95|8|2|3|2985.27|0.02|0.03|N|O|1997-11-15|1997-12-16|1997-11-20|DELIVER IN PERSON|FOB| sentiments according to the q|
+1794|117|8|3|23|23393.53|0.08|0.04|N|O|1997-10-13|1997-11-30|1997-10-28|TAKE BACK RETURN|AIR|usly unusual theodolites doze about |
+1794|85|6|4|34|33492.72|0.06|0.08|N|O|1997-09-29|1997-11-13|1997-10-07|TAKE BACK RETURN|SHIP|rs above the accoun|
+1794|117|4|5|47|47804.17|0.10|0.06|N|O|1998-01-15|1997-11-30|1998-02-14|DELIVER IN PERSON|TRUCK| haggle slyly. furiously express orbit|
+1794|91|3|6|37|36670.33|0.01|0.01|N|O|1998-01-12|1997-12-21|1998-01-17|DELIVER IN PERSON|MAIL|ackages. pinto|
+1795|137|8|1|44|45633.72|0.08|0.08|A|F|1994-04-28|1994-05-24|1994-05-27|NONE|AIR|ites sleep carefully slyly p|
+1795|114|5|2|34|34479.74|0.08|0.00|A|F|1994-04-24|1994-06-01|1994-05-08|DELIVER IN PERSON|SHIP|closely regular instructions wake. |
+1795|168|3|3|25|26704.00|0.07|0.01|A|F|1994-05-18|1994-05-22|1994-05-20|TAKE BACK RETURN|RAIL|he always express accounts ca|
+1795|125|8|4|32|32803.84|0.03|0.06|R|F|1994-05-10|1994-04-21|1994-05-17|DELIVER IN PERSON|SHIP| asymptotes across the bold,|
+1795|163|8|5|11|11694.76|0.08|0.02|R|F|1994-06-19|1994-04-24|1994-07-02|TAKE BACK RETURN|TRUCK|slyly. special pa|
+1796|10|1|1|28|25480.28|0.08|0.04|A|F|1992-12-01|1993-01-01|1992-12-24|DELIVER IN PERSON|FOB|y quickly ironic accounts.|
+1796|185|6|2|8|8681.44|0.00|0.08|R|F|1993-01-07|1993-01-04|1993-01-10|NONE|SHIP|slyly bold accounts are furiously agains|
+1797|31|7|1|17|15827.51|0.01|0.02|N|O|1996-08-06|1996-07-11|1996-08-29|NONE|TRUCK| cajole carefully. unusual Tiresias e|
+1797|145|2|2|16|16722.24|0.01|0.00|N|O|1996-06-03|1996-07-21|1996-06-07|NONE|FOB|o beans wake regular accounts. blit|
+1797|12|9|3|21|19152.21|0.02|0.01|N|O|1996-08-05|1996-08-05|1996-08-06|DELIVER IN PERSON|AIR|ns. regular, regular deposit|
+1798|109|10|1|43|43391.30|0.01|0.08|N|O|1997-08-27|1997-10-23|1997-09-09|DELIVER IN PERSON|MAIL|ld packages sleep furiously. depend|
+1799|52|10|1|8|7616.40|0.04|0.08|R|F|1994-06-14|1994-05-27|1994-06-27|TAKE BACK RETURN|MAIL|ealms upon the special, ironic waters|
+1799|27|10|2|42|38934.84|0.02|0.02|R|F|1994-04-05|1994-04-28|1994-04-09|DELIVER IN PERSON|FOB|es pending |
+1824|120|10|1|45|45905.40|0.03|0.02|R|F|1994-08-21|1994-06-21|1994-09-19|NONE|RAIL|ent Tiresias. quickly express |
+1824|69|4|2|40|38762.40|0.10|0.03|A|F|1994-05-08|1994-07-24|1994-06-06|NONE|FOB|es mold furiously final instructions. s|
+1825|156|1|1|43|45414.45|0.05|0.05|A|F|1994-02-18|1994-02-19|1994-03-02|TAKE BACK RETURN|RAIL| accounts breach fluffily spe|
+1825|148|5|2|39|40877.46|0.00|0.00|R|F|1994-04-01|1994-01-12|1994-04-21|DELIVER IN PERSON|REG AIR|ual, bold ideas haggle above the quickly ir|
+1825|17|4|3|7|6419.07|0.04|0.03|A|F|1994-01-02|1994-01-30|1994-01-30|TAKE BACK RETURN|REG AIR|fully ironic requests. requests cajole ex|
+1825|121|10|4|23|23485.76|0.05|0.01|R|F|1994-01-08|1994-02-08|1994-01-19|NONE|MAIL| wake express, even r|
+1825|178|9|5|33|35579.61|0.04|0.04|A|F|1993-12-07|1994-03-01|1993-12-16|TAKE BACK RETURN|RAIL|about the ne|
+1826|27|10|1|4|3708.08|0.06|0.00|R|F|1992-07-05|1992-06-12|1992-08-04|DELIVER IN PERSON|MAIL|alongside of the quickly unusual re|
+1826|68|3|2|9|8712.54|0.07|0.07|R|F|1992-07-12|1992-07-11|1992-07-15|DELIVER IN PERSON|TRUCK| blithely special|
+1826|176|4|3|14|15066.38|0.05|0.01|A|F|1992-04-28|1992-05-31|1992-05-25|COLLECT COD|TRUCK|uriously bold pinto beans are carefully ag|
+1826|180|9|4|6|6481.08|0.05|0.04|R|F|1992-06-30|1992-05-17|1992-07-30|DELIVER IN PERSON|RAIL|kages. blithely silent|
+1826|135|1|5|46|47615.98|0.05|0.06|R|F|1992-05-02|1992-06-25|1992-05-26|TAKE BACK RETURN|FOB|ously? quickly pe|
+1826|108|3|6|43|43348.30|0.02|0.03|A|F|1992-07-28|1992-06-14|1992-08-03|NONE|MAIL|ss tithes use even ideas. fluffily final t|
+1827|90|1|1|47|46534.23|0.00|0.01|N|O|1996-08-01|1996-08-07|1996-08-23|TAKE BACK RETURN|RAIL|. pending courts about the even e|
+1827|154|9|2|48|50599.20|0.03|0.05|N|O|1996-08-28|1996-09-15|1996-09-01|COLLECT COD|RAIL|oxes. special, final asymptote|
+1827|200|1|3|37|40707.40|0.01|0.07|N|O|1996-07-20|1996-08-18|1996-08-08|DELIVER IN PERSON|REG AIR|ously ironic theodolites serve quickly af|
+1827|127|10|4|4|4108.48|0.04|0.04|N|O|1996-07-22|1996-09-10|1996-08-11|DELIVER IN PERSON|RAIL|special requests. blithely|
+1827|80|10|5|24|23521.92|0.00|0.08|N|O|1996-08-07|1996-09-01|1996-09-04|DELIVER IN PERSON|SHIP|al gifts! re|
+1827|21|2|6|7|6447.14|0.10|0.02|N|O|1996-08-28|1996-08-07|1996-08-31|DELIVER IN PERSON|AIR|egular foxes|
+1827|6|7|7|38|34428.00|0.05|0.01|N|O|1996-10-17|1996-08-29|1996-11-07|TAKE BACK RETURN|SHIP| blithely. express, bo|
+1828|100|4|1|33|33003.30|0.05|0.04|R|F|1994-06-27|1994-06-10|1994-07-24|COLLECT COD|FOB|s boost carefully. pending d|
+1828|13|3|2|40|36520.40|0.08|0.07|R|F|1994-05-05|1994-07-02|1994-05-19|COLLECT COD|REG AIR|s use above the quietly fin|
+1828|196|7|3|11|12058.09|0.07|0.08|R|F|1994-07-21|1994-05-28|1994-08-13|DELIVER IN PERSON|FOB| wake blithely |
+1828|8|3|4|45|40860.00|0.02|0.05|R|F|1994-05-15|1994-05-29|1994-05-28|COLLECT COD|RAIL| accounts run slyly |
+1828|79|7|5|14|13706.98|0.01|0.08|A|F|1994-05-20|1994-06-02|1994-05-25|TAKE BACK RETURN|SHIP|. final packages along the carefully bold|
+1829|150|7|1|12|12601.80|0.05|0.06|A|F|1994-08-23|1994-07-13|1994-09-04|DELIVER IN PERSON|FOB|ges wake furiously express pinto|
+1829|5|6|2|11|9955.00|0.04|0.05|A|F|1994-05-18|1994-06-13|1994-06-07|COLLECT COD|MAIL|ding orbits|
+1829|104|9|3|49|49200.90|0.09|0.08|A|F|1994-08-26|1994-08-01|1994-09-16|NONE|TRUCK|ound the quickly |
+1829|153|4|4|14|14744.10|0.03|0.06|A|F|1994-08-15|1994-06-08|1994-08-30|TAKE BACK RETURN|AIR|regular deposits alongside of the flu|
+1829|166|5|5|6|6396.96|0.02|0.07|A|F|1994-08-09|1994-08-05|1994-09-05|DELIVER IN PERSON|MAIL|s haggle! slyl|
+1829|115|5|6|36|36543.96|0.09|0.04|R|F|1994-06-10|1994-06-23|1994-06-22|NONE|FOB|ackages-- express requests sleep; pen|
+1830|120|4|1|38|38764.56|0.00|0.07|R|F|1995-04-20|1995-05-22|1995-04-24|TAKE BACK RETURN|TRUCK|ely even a|
+1830|25|10|2|9|8325.18|0.05|0.07|R|F|1995-03-09|1995-05-24|1995-03-14|NONE|SHIP|st furiously among |
+1830|82|3|3|36|35354.88|0.07|0.07|R|F|1995-04-21|1995-04-14|1995-05-10|DELIVER IN PERSON|SHIP| slowly unusual orbits. carefull|
+1831|136|2|1|9|9325.17|0.02|0.03|A|F|1993-12-17|1994-01-27|1993-12-26|NONE|TRUCK|mptotes. furiously regular dolphins al|
+1831|48|9|2|9|8532.36|0.07|0.06|R|F|1994-03-22|1994-01-07|1994-04-06|COLLECT COD|MAIL|ent deposits. regular saute|
+1831|115|5|3|17|17256.87|0.02|0.08|R|F|1994-01-18|1994-02-12|1994-01-30|TAKE BACK RETURN|MAIL|s boost ironic foxe|
+1831|95|8|4|23|22887.07|0.06|0.02|R|F|1993-12-21|1994-02-08|1994-01-04|NONE|SHIP|ests. express pinto beans abou|
+1856|55|10|1|10|9550.50|0.05|0.07|R|F|1992-05-11|1992-05-20|1992-06-02|TAKE BACK RETURN|FOB|he furiously even theodolites. account|
+1856|97|10|2|47|46863.23|0.07|0.07|R|F|1992-03-22|1992-06-09|1992-04-17|DELIVER IN PERSON|FOB|ingly blithe theodolites. slyly pending |
+1856|117|7|3|20|20342.20|0.04|0.06|R|F|1992-05-04|1992-05-06|1992-05-11|DELIVER IN PERSON|MAIL|ost carefully. slyly bold accounts|
+1856|150|1|4|22|23103.30|0.08|0.02|A|F|1992-05-02|1992-05-26|1992-05-20|TAKE BACK RETURN|REG AIR|platelets detect slyly regular packages. ca|
+1856|190|1|5|14|15262.66|0.01|0.01|A|F|1992-04-14|1992-05-02|1992-05-11|COLLECT COD|SHIP|ans are even requests. deposits caj|
+1856|23|6|6|36|33228.72|0.03|0.05|A|F|1992-06-19|1992-05-12|1992-06-28|TAKE BACK RETURN|TRUCK|ly even foxes kindle blithely even realm|
+1856|130|3|7|42|43265.46|0.04|0.00|R|F|1992-05-23|1992-06-06|1992-06-19|COLLECT COD|RAIL|usly final deposits|
+1857|174|5|1|15|16112.55|0.10|0.03|R|F|1993-04-05|1993-02-28|1993-04-13|COLLECT COD|RAIL|egular, regular inst|
+1857|167|6|2|40|42686.40|0.10|0.00|R|F|1993-02-15|1993-03-08|1993-02-21|NONE|AIR|slyly close d|
+1857|119|3|3|8|8152.88|0.01|0.07|R|F|1993-01-27|1993-04-04|1993-02-20|TAKE BACK RETURN|AIR|slyly about the fluffily silent req|
+1857|100|3|4|41|41004.10|0.07|0.07|A|F|1993-04-16|1993-02-16|1993-04-18|NONE|REG AIR| the slyly|
+1858|14|8|1|33|30162.33|0.01|0.02|N|O|1997-12-28|1998-02-03|1998-01-13|NONE|RAIL|tect along the slyly final|
+1859|75|6|1|18|17551.26|0.10|0.00|N|O|1997-08-08|1997-06-30|1997-08-26|TAKE BACK RETURN|SHIP|e carefully a|
+1859|188|9|2|36|39174.48|0.02|0.01|N|O|1997-05-05|1997-07-08|1997-05-25|TAKE BACK RETURN|REG AIR|regular requests. carefully unusual theo|
+1859|158|10|3|5|5290.75|0.06|0.03|N|O|1997-06-20|1997-05-20|1997-07-19|TAKE BACK RETURN|AIR|across the p|
+1859|191|2|4|21|22914.99|0.00|0.03|N|O|1997-08-06|1997-05-29|1997-08-26|TAKE BACK RETURN|REG AIR|lar packages wake quickly exp|
+1859|46|3|5|11|10406.44|0.06|0.06|N|O|1997-07-15|1997-06-05|1997-07-29|TAKE BACK RETURN|SHIP|ffily ironic pac|
+1859|105|8|6|12|12061.20|0.08|0.03|N|O|1997-05-22|1997-06-08|1997-06-07|COLLECT COD|TRUCK|es. unusual, silent request|
+1860|113|4|1|9|9117.99|0.04|0.04|N|O|1996-08-03|1996-05-31|1996-08-04|DELIVER IN PERSON|TRUCK|c realms print carefully car|
+1861|68|5|1|7|6776.42|0.08|0.05|A|F|1994-01-14|1994-04-03|1994-01-16|COLLECT COD|RAIL|s foxes. slyly|
+1861|27|8|2|31|28737.62|0.10|0.05|R|F|1994-01-29|1994-03-07|1994-02-15|TAKE BACK RETURN|RAIL|arefully unusual|
+1861|24|9|3|23|21252.46|0.00|0.08|A|F|1994-04-09|1994-03-04|1994-04-11|DELIVER IN PERSON|MAIL|in packages sleep silent dolphins; sly|
+1861|116|6|4|38|38612.18|0.10|0.05|R|F|1994-02-26|1994-02-05|1994-03-01|NONE|RAIL|pending deposits cajole quic|
+1861|16|3|5|2|1832.02|0.03|0.08|R|F|1994-04-26|1994-03-15|1994-05-15|TAKE BACK RETURN|MAIL|e final, regular requests. carefully |
+1862|30|5|1|41|38131.23|0.10|0.00|N|O|1998-06-05|1998-05-17|1998-07-04|COLLECT COD|FOB| carefully along|
+1862|166|7|2|37|39447.92|0.06|0.02|N|O|1998-04-15|1998-05-15|1998-05-14|TAKE BACK RETURN|MAIL|l deposits. carefully even dep|
+1862|104|1|3|26|26106.60|0.02|0.01|N|O|1998-03-25|1998-05-17|1998-04-17|TAKE BACK RETURN|TRUCK|g carefully: thinly ironic deposits af|
+1863|63|2|1|48|46226.88|0.09|0.04|A|F|1993-10-10|1993-12-09|1993-10-19|NONE|FOB|ans hinder furiou|
+1863|157|2|2|48|50743.20|0.04|0.08|A|F|1993-11-08|1993-11-05|1993-12-08|COLLECT COD|AIR|onic theodolites alongside of the pending a|
+1888|98|10|1|27|26948.43|0.03|0.06|R|F|1994-02-13|1994-01-16|1994-02-25|NONE|REG AIR|. carefully special dolphins sle|
+1888|74|5|2|38|37014.66|0.03|0.03|R|F|1993-11-29|1994-01-16|1993-12-08|TAKE BACK RETURN|TRUCK|dazzle carefull|
+1888|80|1|3|49|48023.92|0.07|0.05|A|F|1994-02-27|1994-01-14|1994-03-28|DELIVER IN PERSON|FOB|lar accounts haggle carefu|
+1888|19|10|4|9|8271.09|0.01|0.04|A|F|1994-02-09|1994-01-22|1994-02-19|NONE|AIR| packages are blithely. carefu|
+1888|160|1|5|4|4240.64|0.03|0.06|R|F|1993-12-28|1993-12-19|1994-01-11|COLLECT COD|FOB|lphins. ironically special theodolit|
+1888|53|8|6|48|45746.40|0.08|0.08|R|F|1994-02-28|1993-12-16|1994-03-15|COLLECT COD|TRUCK|ar ideas cajole. regular p|
+1888|167|6|7|50|53358.00|0.04|0.07|R|F|1993-12-22|1994-01-10|1994-01-06|DELIVER IN PERSON|FOB|ependencies affix blithely regular warhors|
+1889|152|4|1|41|43138.15|0.10|0.02|N|O|1997-06-15|1997-05-10|1997-07-08|NONE|AIR|s! furiously pending r|
+1889|172|3|2|13|13938.21|0.05|0.00|N|O|1997-06-12|1997-04-28|1997-06-23|NONE|REG AIR|to the regular accounts. carefully express|
+1889|138|9|3|36|37372.68|0.05|0.07|N|O|1997-05-19|1997-06-14|1997-05-23|NONE|SHIP|l pinto beans kindle |
+1889|168|5|4|5|5340.80|0.02|0.07|N|O|1997-06-26|1997-06-09|1997-07-21|COLLECT COD|AIR|ording to the blithely silent r|
+1890|141|8|1|26|27069.64|0.03|0.07|N|O|1997-04-02|1997-03-13|1997-04-22|DELIVER IN PERSON|FOB|ngage. slyly ironic |
+1890|100|1|2|43|43004.30|0.07|0.03|N|O|1996-12-30|1997-01-31|1997-01-19|DELIVER IN PERSON|FOB|p ironic, express accounts. fu|
+1890|59|1|3|24|23017.20|0.06|0.04|N|O|1997-02-09|1997-02-10|1997-02-12|COLLECT COD|MAIL|is wake carefully above the even id|
+1890|68|9|4|43|41626.58|0.09|0.04|N|O|1997-04-08|1997-02-19|1997-04-30|TAKE BACK RETURN|FOB|lyly. instructions across the furiously|
+1890|122|3|5|45|45995.40|0.08|0.05|N|O|1997-04-15|1997-03-16|1997-04-19|COLLECT COD|FOB|he carefully regular sauternes. ironic fret|
+1890|181|2|6|16|17298.88|0.08|0.02|N|O|1997-02-13|1997-02-18|1997-03-12|TAKE BACK RETURN|TRUCK|ged pinto beans. regular, regular id|
+1890|121|4|7|10|10211.20|0.01|0.04|N|O|1996-12-24|1997-02-19|1997-01-01|DELIVER IN PERSON|AIR|. even, unusual inst|
+1891|77|8|1|45|43968.15|0.07|0.04|A|F|1994-12-20|1995-01-16|1995-01-05|NONE|RAIL|ests along|
+1891|184|5|2|18|19515.24|0.06|0.00|A|F|1995-01-24|1995-01-29|1995-02-14|NONE|RAIL| foxes above the carefu|
+1891|198|9|3|15|16472.85|0.03|0.00|R|F|1995-03-11|1995-03-05|1995-03-18|TAKE BACK RETURN|MAIL| accounts are furiou|
+1892|113|7|1|48|48629.28|0.02|0.01|A|F|1994-06-16|1994-06-16|1994-06-28|NONE|RAIL|tornis detect regul|
+1892|43|2|2|35|33006.40|0.04|0.08|R|F|1994-04-05|1994-05-09|1994-05-03|NONE|MAIL|hes nod furiously around the instruc|
+1892|134|5|3|37|38262.81|0.10|0.03|R|F|1994-04-11|1994-06-04|1994-04-24|TAKE BACK RETURN|SHIP|nts. slyly regular asymptot|
+1892|197|9|4|14|15360.66|0.06|0.07|R|F|1994-04-08|1994-06-12|1994-04-27|DELIVER IN PERSON|FOB|furiously about the furiously|
+1893|99|1|1|43|42960.87|0.10|0.00|N|O|1998-01-25|1998-01-06|1998-02-14|COLLECT COD|SHIP|he carefully regular |
+1893|148|9|2|49|51358.86|0.03|0.05|N|O|1998-01-19|1998-01-28|1998-02-02|TAKE BACK RETURN|FOB|y final foxes bo|
+1893|45|6|3|3|2835.12|0.03|0.02|N|O|1998-02-10|1998-01-18|1998-02-25|DELIVER IN PERSON|MAIL|gular, even ideas. fluffily bol|
+1893|101|6|4|18|18019.80|0.07|0.06|N|O|1998-01-24|1998-01-12|1998-02-13|TAKE BACK RETURN|RAIL|g packages. fluffily final reques|
+1893|53|4|5|6|5718.30|0.10|0.02|N|O|1998-01-23|1997-12-22|1998-02-09|DELIVER IN PERSON|TRUCK|ar accounts use. daringly ironic packag|
+1894|169|10|1|40|42766.40|0.03|0.07|R|F|1992-06-07|1992-05-11|1992-07-01|DELIVER IN PERSON|FOB|ily furiously bold packages. flu|
+1895|161|6|1|43|45629.88|0.09|0.07|R|F|1994-07-26|1994-07-19|1994-08-11|NONE|AIR| carefully eve|
+1920|96|7|1|24|23906.16|0.04|0.05|N|O|1998-09-27|1998-08-23|1998-10-15|DELIVER IN PERSON|AIR|thely. bold, pend|
+1920|51|6|2|31|29482.55|0.05|0.06|N|O|1998-08-01|1998-08-30|1998-08-17|COLLECT COD|SHIP|lly. ideas wa|
+1920|18|2|3|6|5508.06|0.01|0.05|N|O|1998-10-01|1998-08-20|1998-10-24|COLLECT COD|SHIP|l ideas boost slyly pl|
+1920|84|5|4|50|49204.00|0.09|0.06|N|O|1998-10-03|1998-08-04|1998-10-29|DELIVER IN PERSON|MAIL|e blithely unusual foxes. brave packages|
+1920|34|10|5|14|13076.42|0.08|0.05|N|O|1998-10-22|1998-08-10|1998-10-27|DELIVER IN PERSON|AIR|ickly ironic d|
+1921|21|10|1|9|8289.18|0.08|0.00|R|F|1994-02-01|1994-03-20|1994-03-01|DELIVER IN PERSON|FOB|to beans. even excuses integrate specia|
+1921|140|6|2|21|21842.94|0.02|0.06|R|F|1994-02-08|1994-03-28|1994-02-15|COLLECT COD|FOB|ckly regula|
+1921|71|2|3|27|26218.89|0.00|0.04|A|F|1994-04-26|1994-04-07|1994-04-30|TAKE BACK RETURN|FOB|ing pinto beans above the pend|
+1922|10|5|1|13|11830.13|0.05|0.03|N|O|1996-10-24|1996-09-21|1996-11-15|NONE|SHIP|quests. furiously|
+1923|37|8|1|9|8433.27|0.01|0.08|N|O|1997-08-29|1997-09-13|1997-09-07|NONE|FOB|lites. ironic instructions integrate bravel|
+1923|178|8|2|23|24797.91|0.07|0.05|N|O|1997-09-08|1997-08-11|1997-09-14|TAKE BACK RETURN|MAIL|aggle carefully. furiously permanent|
+1923|180|1|3|11|11881.98|0.03|0.03|N|O|1997-07-12|1997-09-04|1997-08-01|TAKE BACK RETURN|REG AIR|ages wake slyly about the furiously regular|
+1923|193|5|4|49|53566.31|0.06|0.05|N|O|1997-07-21|1997-08-08|1997-07-26|NONE|AIR|de of the carefully expre|
+1923|184|5|5|25|27104.50|0.10|0.08|N|O|1997-08-18|1997-08-20|1997-09-12|DELIVER IN PERSON|TRUCK|the ideas: slyly pendin|
+1923|37|3|6|50|46851.50|0.03|0.03|N|O|1997-11-04|1997-08-08|1997-11-25|NONE|TRUCK|uickly along the bold courts. bold the|
+1924|73|1|1|7|6811.49|0.06|0.07|N|O|1997-01-01|1996-12-02|1997-01-08|COLLECT COD|SHIP|osits. even accounts nag furious|
+1924|18|8|2|47|43146.47|0.02|0.06|N|O|1996-11-24|1996-10-18|1996-12-13|COLLECT COD|REG AIR|silent requests cajole blithely final pack|
+1924|57|8|3|40|38282.00|0.04|0.08|N|O|1996-10-31|1996-11-30|1996-11-21|NONE|REG AIR|ains sleep carefully|
+1924|34|5|4|31|28954.93|0.03|0.03|N|O|1996-09-20|1996-10-19|1996-10-19|DELIVER IN PERSON|SHIP| the slyly regular foxes. ruthle|
+1924|36|7|5|17|15912.51|0.04|0.05|N|O|1996-12-31|1996-11-12|1997-01-25|COLLECT COD|TRUCK|e carefully theodolites. ironically ironic |
+1924|76|4|6|15|14641.05|0.02|0.04|N|O|1997-01-04|1996-11-13|1997-01-27|NONE|SHIP|he package|
+1924|40|1|7|21|19740.84|0.09|0.03|N|O|1996-09-21|1996-11-12|1996-10-02|TAKE BACK RETURN|AIR| blithely reg|
+1925|184|5|1|50|54209.00|0.01|0.02|R|F|1992-04-12|1992-04-23|1992-05-08|TAKE BACK RETURN|TRUCK|usual pinto|
+1925|135|1|2|35|36229.55|0.06|0.06|R|F|1992-05-11|1992-04-10|1992-05-14|TAKE BACK RETURN|AIR|counts. carefully ironic packages boost ab|
+1925|116|10|3|40|40644.40|0.08|0.08|A|F|1992-05-17|1992-05-20|1992-06-08|TAKE BACK RETURN|AIR|e carefully regul|
+1925|30|5|4|17|15810.51|0.06|0.02|R|F|1992-05-18|1992-04-06|1992-06-16|TAKE BACK RETURN|MAIL|instructions sleep. pinto bea|
+1926|51|9|1|24|22825.20|0.06|0.05|N|O|1996-05-04|1996-03-14|1996-06-01|DELIVER IN PERSON|RAIL|e theodolites.|
+1926|106|9|2|29|29176.90|0.09|0.08|N|O|1996-02-26|1996-03-14|1996-03-14|TAKE BACK RETURN|TRUCK|es. dependencies according to the fl|
+1926|178|6|3|10|10781.70|0.02|0.03|N|O|1996-05-23|1996-03-02|1996-06-04|NONE|AIR|usly bold accounts. express accounts|
+1926|68|9|4|13|12584.78|0.04|0.02|N|O|1996-04-26|1996-04-13|1996-05-08|DELIVER IN PERSON|MAIL|eans wake bli|
+1926|40|1|5|29|27261.16|0.06|0.00|N|O|1996-02-29|1996-03-13|1996-03-24|DELIVER IN PERSON|MAIL|hily unusual packages are fluffily am|
+1927|68|5|1|3|2904.18|0.00|0.05|N|O|1995-10-06|1995-12-08|1995-11-05|COLLECT COD|FOB|ccounts affi|
+1927|73|2|2|15|14596.05|0.08|0.08|N|O|1995-12-25|1995-12-26|1995-12-31|COLLECT COD|RAIL| carefully regular requests sleep car|
+1927|65|10|3|6|5790.36|0.05|0.05|N|O|1995-11-29|1995-11-20|1995-12-08|TAKE BACK RETURN|TRUCK|furiously even wat|
+1952|53|8|1|7|6671.35|0.04|0.05|A|F|1994-05-06|1994-06-11|1994-05-12|NONE|RAIL|about the express, even requ|
+1952|142|5|2|6|6252.84|0.06|0.05|A|F|1994-05-09|1994-05-21|1994-05-26|DELIVER IN PERSON|AIR|packages haggle. |
+1953|128|1|1|25|25703.00|0.07|0.06|A|F|1994-01-07|1994-01-28|1994-01-29|TAKE BACK RETURN|RAIL|ular, regular i|
+1953|14|5|2|35|31990.35|0.06|0.06|R|F|1994-02-03|1994-02-25|1994-02-14|DELIVER IN PERSON|FOB|among the fur|
+1954|152|7|1|31|32616.65|0.06|0.06|N|O|1997-08-18|1997-07-07|1997-09-03|DELIVER IN PERSON|RAIL|against the packages. bold, ironic e|
+1954|182|3|2|1|1082.18|0.03|0.01|N|O|1997-09-16|1997-07-08|1997-10-07|COLLECT COD|MAIL|te. furiously final deposits hag|
+1954|199|2|3|11|12091.09|0.07|0.07|N|O|1997-08-07|1997-07-23|1997-08-25|DELIVER IN PERSON|TRUCK|y carefully ironi|
+1954|159|4|4|12|12709.80|0.02|0.08|N|O|1997-07-19|1997-07-04|1997-08-06|COLLECT COD|AIR|ongside of the slyly unusual requests. reg|
+1954|170|7|5|29|31034.93|0.08|0.08|N|O|1997-08-25|1997-07-15|1997-09-02|DELIVER IN PERSON|RAIL|use thinly furiously regular asy|
+1954|177|8|6|13|14003.21|0.00|0.07|N|O|1997-06-15|1997-08-22|1997-06-20|TAKE BACK RETURN|MAIL|y ironic instructions cajole|
+1954|194|5|7|49|53615.31|0.05|0.06|N|O|1997-06-04|1997-08-29|1997-06-14|COLLECT COD|TRUCK|eans. final pinto beans sleep furiousl|
+1955|137|3|1|32|33188.16|0.02|0.02|A|F|1992-07-05|1992-06-29|1992-08-03|TAKE BACK RETURN|TRUCK|g to the carefully sile|
+1955|18|8|2|2|1836.02|0.03|0.01|R|F|1992-07-06|1992-07-06|1992-08-01|COLLECT COD|TRUCK|ickly aroun|
+1955|158|6|3|41|43384.15|0.08|0.06|A|F|1992-08-01|1992-06-04|1992-08-07|COLLECT COD|AIR| carefully against the furiously reg|
+1955|9|4|4|16|14544.00|0.03|0.07|A|F|1992-04-30|1992-06-23|1992-05-23|TAKE BACK RETURN|FOB|odolites eat s|
+1955|159|10|5|11|11650.65|0.09|0.01|A|F|1992-06-03|1992-07-04|1992-06-07|NONE|REG AIR|ously quickly pendi|
+1956|177|8|1|8|8617.36|0.02|0.04|A|F|1992-12-25|1992-11-24|1993-01-12|TAKE BACK RETURN|AIR|efully about the ironic, ironic de|
+1956|103|6|2|16|16049.60|0.00|0.05|R|F|1992-11-11|1992-11-11|1992-11-30|NONE|FOB|es cajole blithely. pen|
+1956|139|5|3|39|40526.07|0.08|0.02|A|F|1992-09-24|1992-11-26|1992-10-15|DELIVER IN PERSON|REG AIR|r theodolites sleep above the b|
+1956|29|10|4|11|10219.22|0.10|0.00|A|F|1992-12-19|1992-10-29|1993-01-07|TAKE BACK RETURN|AIR| the braids slee|
+1956|155|10|5|16|16882.40|0.08|0.02|R|F|1992-09-28|1992-10-21|1992-09-30|TAKE BACK RETURN|FOB| wake after the |
+1957|79|9|1|50|48953.50|0.09|0.05|N|O|1998-08-08|1998-09-28|1998-08-27|COLLECT COD|FOB|gainst the re|
+1957|119|3|2|31|31592.41|0.10|0.08|N|O|1998-08-13|1998-08-31|1998-08-16|NONE|REG AIR|express packages maintain fluffi|
+1958|73|2|1|9|8757.63|0.01|0.05|N|O|1995-12-08|1995-12-17|1995-12-18|DELIVER IN PERSON|REG AIR|ickly. slyly bold |
+1958|176|7|2|29|31208.93|0.05|0.06|N|O|1996-01-19|1995-12-05|1996-02-14|COLLECT COD|SHIP|d pinto beans|
+1958|102|3|3|4|4008.40|0.04|0.02|N|O|1995-10-24|1995-12-09|1995-10-28|DELIVER IN PERSON|AIR|he slyly even dependencies |
+1958|83|4|4|38|37357.04|0.09|0.07|N|O|1995-10-09|1995-11-26|1995-11-05|COLLECT COD|TRUCK|yly. slyly regular courts use silentl|
+1958|101|8|5|31|31034.10|0.08|0.01|N|O|1995-10-31|1995-11-12|1995-11-07|TAKE BACK RETURN|TRUCK|r deposits c|
+1958|17|4|6|44|40348.44|0.08|0.04|N|O|1995-12-17|1995-11-30|1996-01-15|TAKE BACK RETURN|RAIL|c theodolites after the unusual deposit|
+1958|39|5|7|29|27231.87|0.02|0.05|N|O|1995-10-14|1995-11-06|1995-11-01|NONE|REG AIR|final requests nag according to the |
+1959|169|10|1|46|49181.36|0.04|0.00|N|O|1997-05-05|1997-03-03|1997-05-24|TAKE BACK RETURN|AIR| furiously ex|
+1959|120|7|2|15|15301.80|0.08|0.07|N|O|1997-01-20|1997-02-18|1997-02-08|DELIVER IN PERSON|MAIL| quickly sp|
+1984|53|5|1|45|42887.25|0.03|0.04|N|O|1998-04-09|1998-06-11|1998-05-01|COLLECT COD|AIR|p. quickly final ideas sle|
+1984|70|7|2|35|33952.45|0.01|0.07|N|O|1998-05-18|1998-05-04|1998-06-01|COLLECT COD|RAIL|tes. quickly pending packages haggle boldl|
+1985|28|1|1|33|30624.66|0.10|0.03|R|F|1994-12-04|1994-11-01|1994-12-05|DELIVER IN PERSON|FOB|s are express packages. pendin|
+1985|21|6|2|50|46051.00|0.04|0.02|R|F|1994-09-30|1994-10-18|1994-10-12|COLLECT COD|AIR|ate carefully. carefully|
+1985|134|10|3|20|20682.60|0.07|0.03|R|F|1994-10-29|1994-11-12|1994-11-27|NONE|TRUCK|regular requests. furiously express|
+1985|199|10|4|30|32975.70|0.05|0.07|R|F|1994-09-06|1994-10-10|1994-09-26|NONE|RAIL|uickly. instr|
+1985|124|9|5|42|43013.04|0.05|0.05|R|F|1994-10-25|1994-11-03|1994-11-19|DELIVER IN PERSON|SHIP| patterns? final requests after the sp|
+1985|20|7|6|2|1840.04|0.02|0.00|A|F|1994-11-25|1994-10-09|1994-12-25|TAKE BACK RETURN|FOB| silent inst|
+1986|92|3|1|12|11905.08|0.06|0.05|A|F|1994-08-17|1994-06-28|1994-09-02|COLLECT COD|RAIL|sleep furiously fluffily final|
+1986|105|8|2|10|10051.00|0.10|0.03|R|F|1994-05-14|1994-06-21|1994-06-02|COLLECT COD|REG AIR|yly into the carefully even |
+1986|63|2|3|14|13482.84|0.04|0.02|R|F|1994-07-14|1994-06-19|1994-08-08|NONE|SHIP|the packages. pending, unusual|
+1987|16|6|1|7|6412.07|0.03|0.03|A|F|1994-07-30|1994-07-06|1994-08-29|NONE|REG AIR| regular a|
+1988|72|1|1|36|34994.52|0.09|0.04|N|O|1996-01-21|1995-11-24|1996-01-27|NONE|RAIL|gular theodolites. |
+1988|199|3|2|19|20884.61|0.08|0.08|N|O|1996-02-03|1995-12-10|1996-02-14|COLLECT COD|FOB|lly about the slyly thin instructions. f|
+1988|54|6|3|8|7632.40|0.06|0.01|N|O|1995-10-20|1995-11-11|1995-11-18|DELIVER IN PERSON|AIR|le quickly ac|
+1988|36|2|4|27|25272.81|0.08|0.00|N|O|1996-01-27|1995-12-24|1996-02-24|TAKE BACK RETURN|TRUCK|uests. regular requests are according to t|
+1988|79|8|5|26|25455.82|0.08|0.04|N|O|1996-01-25|1995-12-15|1996-01-26|COLLECT COD|SHIP| ironic dolphins haggl|
+1988|86|7|6|9|8874.72|0.08|0.03|N|O|1995-12-26|1996-01-02|1996-01-25|DELIVER IN PERSON|MAIL|lar platelets. slyly ironic packa|
+1989|10|7|1|47|42770.47|0.10|0.02|R|F|1994-06-21|1994-05-27|1994-06-22|TAKE BACK RETURN|REG AIR|final deposits s|
+1990|101|2|1|46|46050.60|0.01|0.07|R|F|1994-12-29|1995-03-14|1995-01-13|NONE|TRUCK|ar sentiments.|
+1991|110|3|1|39|39394.29|0.06|0.02|A|F|1993-01-01|1992-11-29|1993-01-10|TAKE BACK RETURN|TRUCK|ckages? carefully bold depos|
+1991|53|1|2|49|46699.45|0.08|0.06|R|F|1992-10-19|1992-11-29|1992-10-25|NONE|SHIP|nd the ideas affi|
+1991|174|5|3|6|6445.02|0.02|0.01|A|F|1992-11-02|1992-10-08|1992-11-14|TAKE BACK RETURN|REG AIR|hes nag slyly|
+1991|138|9|4|6|6228.78|0.10|0.06|A|F|1992-11-21|1992-11-03|1992-11-27|NONE|RAIL|uickly blithely final de|
+1991|60|8|5|49|47042.94|0.06|0.00|R|F|1992-09-10|1992-11-30|1992-10-07|NONE|AIR|quests cajole blithely|
+2016|147|4|1|2|2094.28|0.02|0.07|N|O|1996-10-12|1996-11-09|1996-10-31|DELIVER IN PERSON|TRUCK|carefully according to the |
+2016|63|8|2|15|14445.90|0.04|0.05|N|O|1996-09-24|1996-10-05|1996-10-21|TAKE BACK RETURN|MAIL|uests haggle carefully furiously regul|
+2016|122|7|3|8|8176.96|0.09|0.02|N|O|1996-09-19|1996-10-21|1996-10-13|TAKE BACK RETURN|SHIP|mptotes haggle ideas. packages wake flu|
+2017|103|4|1|49|49151.90|0.10|0.06|N|O|1998-05-26|1998-07-01|1998-06-06|COLLECT COD|TRUCK| after the unusual instructions. sly|
+2017|71|2|2|14|13594.98|0.07|0.04|N|O|1998-06-28|1998-06-15|1998-07-11|NONE|TRUCK|ily final w|
+2017|84|5|3|11|10824.88|0.05|0.02|N|O|1998-05-22|1998-07-13|1998-05-26|TAKE BACK RETURN|TRUCK|gside of the slyly dogged dolp|
+2018|195|6|1|2|2190.38|0.02|0.07|N|O|1995-06-25|1995-06-20|1995-07-04|NONE|TRUCK|ly ironic accounts against the slyly sly|
+2018|129|10|2|23|23669.76|0.05|0.01|R|F|1995-05-05|1995-05-12|1995-05-22|TAKE BACK RETURN|RAIL|ingly even theodolites s|
+2019|4|9|1|31|28024.00|0.07|0.03|R|F|1992-11-18|1992-12-26|1992-11-24|DELIVER IN PERSON|FOB|l ideas across the slowl|
+2019|52|7|2|18|17136.90|0.04|0.03|R|F|1993-01-24|1992-12-22|1993-02-02|NONE|MAIL|are carefully furiously regular requ|
+2020|34|10|1|50|46701.50|0.06|0.01|R|F|1993-07-12|1993-08-28|1993-08-02|COLLECT COD|TRUCK|ts against the pending ideas serve along|
+2020|176|4|2|40|43046.80|0.09|0.00|A|F|1993-10-17|1993-09-14|1993-10-29|TAKE BACK RETURN|RAIL|ently across the|
+2020|14|4|3|30|27420.30|0.07|0.04|A|F|1993-09-08|1993-08-11|1993-09-29|TAKE BACK RETURN|AIR|ly about the blithely ironic foxes. bold|
+2020|61|8|4|27|25948.62|0.05|0.06|A|F|1993-07-14|1993-09-02|1993-08-03|NONE|FOB|e of the bold foxes haggle |
+2021|85|6|1|7|6895.56|0.08|0.04|N|O|1995-10-17|1995-09-29|1995-10-20|NONE|MAIL| accounts boost blithely. blithely reg|
+2021|166|3|2|19|20257.04|0.04|0.05|N|O|1995-08-14|1995-09-05|1995-08-23|NONE|RAIL| above the slyly fl|
+2022|169|8|1|38|40628.08|0.00|0.08|R|F|1992-07-05|1992-04-20|1992-07-13|TAKE BACK RETURN|REG AIR| against the express accounts wake ca|
+2022|55|3|2|38|36291.90|0.05|0.04|R|F|1992-06-17|1992-05-15|1992-06-28|COLLECT COD|SHIP|instructions dazzle carefull|
+2022|49|10|3|48|45553.92|0.10|0.02|A|F|1992-06-14|1992-06-04|1992-07-12|DELIVER IN PERSON|SHIP|counts. slyly enticing accounts are during |
+2022|182|3|4|16|17314.88|0.05|0.03|R|F|1992-06-23|1992-05-22|1992-07-07|NONE|TRUCK|ages wake slyly care|
+2022|100|1|5|36|36003.60|0.05|0.02|R|F|1992-03-24|1992-05-07|1992-04-13|NONE|MAIL|ly after the foxes. regular, final inst|
+2022|129|2|6|20|20582.40|0.08|0.08|A|F|1992-03-31|1992-04-17|1992-04-02|NONE|SHIP|r deposits kindle |
+2022|78|9|7|13|12714.91|0.06|0.08|R|F|1992-04-04|1992-05-30|1992-04-21|NONE|FOB| orbits haggle fluffily fl|
+2023|127|10|1|9|9244.08|0.05|0.04|R|F|1992-06-04|1992-06-30|1992-06-10|NONE|AIR|ly regular pinto beans poa|
+2023|38|4|2|2|1876.06|0.01|0.00|R|F|1992-08-27|1992-07-16|1992-08-29|DELIVER IN PERSON|RAIL|ing packages. fluffily silen|
+2023|19|6|3|25|22975.25|0.10|0.03|A|F|1992-07-19|1992-07-07|1992-08-15|NONE|REG AIR| wake furiously among the slyly final|
+2023|185|6|4|9|9766.62|0.02|0.00|A|F|1992-07-23|1992-07-04|1992-08-20|TAKE BACK RETURN|AIR|nts maintain blithely alongside of the|
+2023|20|10|5|22|20240.44|0.04|0.06|A|F|1992-06-15|1992-07-13|1992-06-21|TAKE BACK RETURN|SHIP|ronic attainments. |
+2023|43|2|6|29|27348.16|0.02|0.06|A|F|1992-08-29|1992-07-28|1992-09-18|COLLECT COD|RAIL|usual instructions. bli|
+2023|134|10|7|50|51706.50|0.00|0.03|R|F|1992-06-20|1992-07-04|1992-06-23|DELIVER IN PERSON|FOB|its! carefully ex|
+2048|35|1|1|7|6545.21|0.06|0.01|R|F|1993-12-07|1994-01-31|1994-01-05|TAKE BACK RETURN|REG AIR|lent platelets boost deposits. carefully sp|
+2048|8|5|2|5|4540.00|0.04|0.04|A|F|1994-01-18|1994-02-01|1994-01-29|TAKE BACK RETURN|TRUCK|affix carefully against |
+2048|101|2|3|12|12013.20|0.01|0.05|R|F|1994-01-28|1994-01-19|1994-02-08|NONE|AIR| even theodoli|
+2048|97|1|4|11|10967.99|0.10|0.03|R|F|1993-12-20|1994-01-19|1994-01-04|TAKE BACK RETURN|MAIL|totes. idly ironic packages nag|
+2049|189|10|1|25|27229.50|0.08|0.00|N|O|1996-03-31|1996-02-29|1996-04-15|DELIVER IN PERSON|MAIL| excuses above the |
+2049|35|1|2|31|28985.93|0.10|0.05|N|O|1995-12-25|1996-02-25|1995-12-29|TAKE BACK RETURN|MAIL| packages are slyly alongside|
+2049|67|6|3|18|17407.08|0.05|0.05|N|O|1996-01-09|1996-01-22|1996-01-25|TAKE BACK RETURN|AIR| sleep fluffily. dependencies use never|
+2049|6|7|4|39|35334.00|0.02|0.05|N|O|1996-01-17|1996-01-21|1996-02-03|TAKE BACK RETURN|MAIL|the even pinto beans |
+2049|126|1|5|30|30783.60|0.04|0.06|N|O|1995-12-16|1996-02-04|1995-12-22|NONE|TRUCK|ial accounts are among the furiously perma|
+2049|84|5|6|17|16729.36|0.07|0.00|N|O|1996-02-04|1996-03-01|1996-02-24|NONE|FOB|al, regular foxes. pending, |
+2050|73|2|1|47|45734.29|0.05|0.03|A|F|1994-08-25|1994-07-18|1994-09-15|DELIVER IN PERSON|TRUCK|tside the blithely pending packages eat f|
+2050|152|3|2|48|50503.20|0.05|0.01|A|F|1994-09-30|1994-08-23|1994-10-29|COLLECT COD|AIR| final packages. pinto|
+2050|113|4|3|41|41537.51|0.10|0.04|A|F|1994-06-08|1994-08-27|1994-06-23|NONE|AIR| final theodolites. depende|
+2050|32|8|4|11|10252.33|0.02|0.01|A|F|1994-07-27|1994-08-18|1994-08-02|DELIVER IN PERSON|REG AIR|ns. bold, final ideas cajole among the fi|
+2050|168|9|5|16|17090.56|0.07|0.01|R|F|1994-08-17|1994-07-28|1994-09-05|DELIVER IN PERSON|REG AIR|al accounts. closely even |
+2050|49|2|6|29|27522.16|0.00|0.05|A|F|1994-09-23|1994-08-01|1994-10-23|TAKE BACK RETURN|MAIL|oxes alongsid|
+2050|48|5|7|25|23701.00|0.10|0.00|R|F|1994-08-18|1994-07-04|1994-09-04|TAKE BACK RETURN|RAIL|y according to |
+2051|25|6|1|43|39775.86|0.08|0.04|N|O|1996-04-22|1996-06-16|1996-04-28|COLLECT COD|RAIL|ounts sleep fluffily even requ|
+2051|130|1|2|48|49446.24|0.01|0.02|N|O|1996-05-04|1996-06-14|1996-05-19|NONE|TRUCK|unts. pending platelets believe about|
+2052|68|7|1|50|48403.00|0.09|0.08|R|F|1992-06-22|1992-06-03|1992-07-19|DELIVER IN PERSON|AIR|wake after the decoy|
+2052|135|1|2|35|36229.55|0.09|0.05|A|F|1992-05-29|1992-05-24|1992-06-11|NONE|TRUCK|ts according t|
+2052|43|2|3|16|15088.64|0.01|0.08|A|F|1992-06-30|1992-07-09|1992-07-12|NONE|SHIP|y final deposits cajole according |
+2052|96|7|4|47|46816.23|0.08|0.01|A|F|1992-06-18|1992-05-16|1992-07-02|TAKE BACK RETURN|REG AIR|final requests. stealt|
+2053|101|4|1|20|20022.00|0.09|0.00|A|F|1995-04-25|1995-04-12|1995-05-13|NONE|TRUCK|ly ironic foxes haggle slyly speci|
+2053|33|4|2|34|31723.02|0.07|0.00|A|F|1995-03-15|1995-03-20|1995-04-09|TAKE BACK RETURN|TRUCK|ions. unusual dependencies|
+2053|65|2|3|46|44392.76|0.01|0.03|R|F|1995-04-01|1995-04-02|1995-04-18|NONE|RAIL|tions. furiously even requests hagg|
+2053|121|6|4|31|31654.72|0.06|0.08|R|F|1995-03-23|1995-03-13|1995-04-16|DELIVER IN PERSON|SHIP|ts. fluffily final mul|
+2054|113|4|1|11|11144.21|0.03|0.05|R|F|1992-08-13|1992-08-26|1992-08-22|NONE|AIR|ular accou|
+2054|120|7|2|31|31623.72|0.05|0.08|A|F|1992-08-18|1992-09-04|1992-08-24|NONE|FOB|se bold, regular accounts. unusual depos|
+2054|121|2|3|32|32675.84|0.06|0.00|A|F|1992-06-23|1992-07-08|1992-07-22|NONE|FOB| packages thrash. carefully final|
+2054|174|3|4|14|15038.38|0.10|0.05|R|F|1992-06-25|1992-09-05|1992-07-14|DELIVER IN PERSON|SHIP|uickly final|
+2054|6|1|5|40|36240.00|0.08|0.06|R|F|1992-06-23|1992-08-09|1992-07-04|TAKE BACK RETURN|RAIL|n pinto beans. ironic courts are iro|
+2054|134|10|6|17|17580.21|0.08|0.01|A|F|1992-06-09|1992-08-28|1992-06-16|NONE|AIR|ges nag acc|
+2054|11|1|7|4|3644.04|0.00|0.08|R|F|1992-08-12|1992-08-31|1992-08-15|DELIVER IN PERSON|AIR|lyly careful requests wake fl|
+2055|45|6|1|15|14175.60|0.04|0.06|A|F|1993-09-15|1993-10-06|1993-10-07|NONE|REG AIR|furiously bold |
+2055|9|10|2|15|13635.00|0.06|0.05|R|F|1993-10-30|1993-11-21|1993-11-22|COLLECT COD|RAIL|gular foxes. b|
+2055|135|1|3|12|12421.56|0.00|0.02|A|F|1993-10-26|1993-11-23|1993-11-22|COLLECT COD|TRUCK|al pains. acco|
+2055|134|10|4|16|16546.08|0.02|0.02|A|F|1993-11-16|1993-11-12|1993-11-28|NONE|TRUCK|arefully daringly regular accounts.|
+2080|7|4|1|5|4535.00|0.08|0.05|R|F|1993-08-26|1993-08-07|1993-09-02|DELIVER IN PERSON|TRUCK|refully unusual theo|
+2080|197|9|2|39|42790.41|0.07|0.04|A|F|1993-08-22|1993-09-09|1993-08-23|COLLECT COD|FOB|ic deposits haggle slyly carefully eve|
+2081|89|10|1|26|25716.08|0.03|0.08|N|O|1997-10-21|1997-10-03|1997-11-10|NONE|FOB|among the slyly express accounts. silen|
+2081|149|2|2|13|13638.82|0.07|0.05|N|O|1997-08-23|1997-08-22|1997-09-09|TAKE BACK RETURN|MAIL|fter the even deposi|
+2081|13|10|3|32|29216.32|0.09|0.07|N|O|1997-09-05|1997-09-26|1997-10-03|TAKE BACK RETURN|SHIP|e. final, regular dependencies sleep slyly!|
+2081|85|6|4|23|22656.84|0.03|0.08|N|O|1997-07-06|1997-09-11|1997-07-21|TAKE BACK RETURN|MAIL|ual requests wake blithely above the|
+2081|113|7|5|19|19249.09|0.02|0.06|N|O|1997-10-01|1997-08-12|1997-10-18|COLLECT COD|SHIP|s affix sometimes express requests. quickly|
+2081|142|9|6|31|32306.34|0.03|0.06|N|O|1997-09-19|1997-09-13|1997-09-27|NONE|AIR| silent, spe|
+2082|75|3|1|36|35102.52|0.00|0.00|R|F|1995-01-20|1995-03-18|1995-01-31|COLLECT COD|MAIL|haggle furiously silent pinto beans|
+2082|105|10|2|12|12061.20|0.08|0.05|A|F|1995-01-27|1995-02-11|1995-02-07|NONE|FOB| ironic instructions. carefull|
+2083|24|3|1|37|34188.74|0.07|0.00|R|F|1993-09-07|1993-09-30|1993-09-18|TAKE BACK RETURN|MAIL|ng the special foxes wake packages. f|
+2084|182|3|1|42|45451.56|0.03|0.05|A|F|1993-03-29|1993-05-05|1993-04-22|COLLECT COD|REG AIR|y fluffily even foxes. |
+2084|180|10|2|23|24844.14|0.09|0.08|A|F|1993-06-05|1993-05-26|1993-06-06|DELIVER IN PERSON|AIR|es against |
+2084|136|2|3|37|38336.81|0.07|0.05|A|F|1993-07-16|1993-04-20|1993-08-06|NONE|AIR|y careful courts.|
+2084|94|8|4|9|8946.81|0.02|0.02|A|F|1993-03-18|1993-06-08|1993-03-30|NONE|TRUCK|heaves boost slyly after the pla|
+2084|27|10|5|28|25956.56|0.07|0.02|R|F|1993-05-04|1993-05-14|1993-05-31|COLLECT COD|TRUCK|cajole quickly carefu|
+2084|115|9|6|15|15226.65|0.09|0.04|A|F|1993-06-23|1993-04-25|1993-07-23|COLLECT COD|SHIP|tithes. bravely pendi|
+2084|194|6|7|34|37202.46|0.09|0.02|R|F|1993-06-20|1993-05-28|1993-06-25|DELIVER IN PERSON|RAIL| carefully ironic requests. fluffil|
+2085|41|8|1|45|42346.80|0.00|0.07|R|F|1994-02-27|1994-01-11|1994-03-29|TAKE BACK RETURN|MAIL|. carefully e|
+2086|60|1|1|22|21121.32|0.03|0.07|R|F|1994-12-04|1994-12-16|1994-12-20|DELIVER IN PERSON|RAIL|idly busy acc|
+2086|141|10|2|32|33316.48|0.04|0.06|A|F|1994-11-15|1995-01-05|1994-12-09|TAKE BACK RETURN|TRUCK|e carefully along th|
+2086|105|6|3|44|44224.40|0.02|0.01|A|F|1994-12-04|1994-11-30|1994-12-21|DELIVER IN PERSON|FOB|latelets s|
+2086|84|5|4|27|26570.16|0.02|0.00|A|F|1994-11-04|1995-01-14|1994-11-25|COLLECT COD|REG AIR|theodolites haggle blithely blithe p|
+2086|156|1|5|33|34852.95|0.04|0.00|A|F|1995-02-06|1994-11-25|1995-02-15|NONE|SHIP| slyly regular foxes. un|
+2086|200|3|6|20|22004.00|0.01|0.03|R|F|1994-11-30|1994-12-28|1994-12-07|COLLECT COD|FOB|lithely ironic acc|
+2086|156|8|7|7|7393.05|0.04|0.05|R|F|1994-12-27|1994-12-10|1995-01-05|COLLECT COD|RAIL| beans haggle car|
+2087|127|8|1|1|1027.12|0.05|0.04|N|O|1998-03-27|1998-03-24|1998-04-18|DELIVER IN PERSON|REG AIR|the quickly idle acco|
+2087|168|3|2|46|49135.36|0.10|0.03|N|O|1998-02-24|1998-04-02|1998-03-04|DELIVER IN PERSON|AIR|ter the dolphins.|
+2087|62|3|3|1|962.06|0.02|0.05|N|O|1998-05-27|1998-04-11|1998-06-12|COLLECT COD|REG AIR|hely final acc|
+2087|59|1|4|6|5754.30|0.03|0.08|N|O|1998-04-23|1998-03-27|1998-05-18|DELIVER IN PERSON|REG AIR|dazzle after the slyly si|
+2112|71|2|1|18|17479.26|0.02|0.05|N|O|1997-05-02|1997-03-16|1997-05-25|TAKE BACK RETURN|TRUCK|lphins solve ideas. even, special reque|
+2113|123|8|1|40|40924.80|0.04|0.06|N|O|1998-01-16|1997-12-11|1998-02-06|TAKE BACK RETURN|TRUCK|bout the quickly ironic t|
+2113|112|2|2|24|24290.64|0.03|0.02|N|O|1998-02-19|1998-01-08|1998-03-16|COLLECT COD|MAIL|kly regular accounts hinder about the|
+2114|168|9|1|50|53408.00|0.05|0.05|A|F|1995-02-05|1995-03-18|1995-02-13|COLLECT COD|RAIL|pecial pinto bean|
+2114|186|7|2|26|28240.68|0.02|0.02|A|F|1995-04-30|1995-04-16|1995-05-28|NONE|SHIP|ar asymptotes sleep |
+2114|162|1|3|25|26554.00|0.07|0.01|A|F|1995-02-15|1995-03-13|1995-02-22|COLLECT COD|AIR|unts. regular, express accounts wake. b|
+2115|196|8|1|27|29597.13|0.06|0.03|N|O|1998-09-01|1998-07-29|1998-09-04|NONE|AIR|de of the carefully bold accounts |
+2115|184|5|2|43|46619.74|0.06|0.02|N|O|1998-07-14|1998-07-25|1998-07-24|COLLECT COD|FOB| carefully pending requests alongs|
+2115|51|3|3|3|2853.15|0.03|0.04|N|O|1998-07-23|1998-07-30|1998-08-14|DELIVER IN PERSON|FOB|quickly ironic dolphin|
+2115|49|10|4|47|44604.88|0.06|0.07|N|O|1998-08-29|1998-07-30|1998-09-05|TAKE BACK RETURN|REG AIR|regular accounts integrate brav|
+2115|199|3|5|13|14289.47|0.04|0.00|N|O|1998-08-07|1998-08-06|1998-08-13|DELIVER IN PERSON|REG AIR|into beans. even accounts abou|
+2116|131|2|1|2|2062.26|0.00|0.02|R|F|1994-10-16|1994-11-24|1994-11-09|DELIVER IN PERSON|TRUCK|r theodolites use blithely about the ir|
+2116|140|1|2|47|48886.58|0.10|0.06|R|F|1994-09-01|1994-11-18|1994-09-25|COLLECT COD|MAIL|iously ironic dependencies around the iro|
+2116|184|5|3|11|11925.98|0.03|0.05|R|F|1994-09-15|1994-10-21|1994-09-21|NONE|FOB| pinto beans. final, final sauternes play |
+2117|165|2|1|36|38345.76|0.10|0.01|N|O|1997-08-06|1997-07-15|1997-08-07|DELIVER IN PERSON|SHIP|ronic accounts wake|
+2117|61|6|2|19|18260.14|0.04|0.00|N|O|1997-07-30|1997-06-18|1997-08-13|DELIVER IN PERSON|REG AIR|s between the slyly regula|
+2117|58|3|3|43|41196.15|0.04|0.03|N|O|1997-06-27|1997-06-12|1997-07-22|DELIVER IN PERSON|SHIP| foxes sleep furiously |
+2117|91|4|4|24|23786.16|0.00|0.07|N|O|1997-06-15|1997-05-27|1997-06-18|COLLECT COD|SHIP|thely slyly pending platelets. ironic, |
+2117|147|8|5|3|3141.42|0.02|0.05|N|O|1997-05-05|1997-07-20|1997-05-26|TAKE BACK RETURN|TRUCK|tes cajole|
+2117|1|4|6|27|24327.00|0.09|0.08|N|O|1997-06-30|1997-06-27|1997-07-11|TAKE BACK RETURN|REG AIR| the carefully ironic ideas|
+2118|160|1|1|24|25443.84|0.10|0.03|N|O|1997-01-06|1996-12-14|1997-01-14|TAKE BACK RETURN|RAIL|about the slyly bold depende|
+2118|184|5|2|4|4336.72|0.08|0.01|N|O|1996-10-25|1996-11-10|1996-11-22|COLLECT COD|AIR|theodolites affix according |
+2118|145|4|3|11|11496.54|0.05|0.04|N|O|1996-12-23|1996-12-20|1997-01-01|COLLECT COD|RAIL|y ironic accounts sleep upon the packages. |
+2119|102|7|1|36|36075.60|0.04|0.00|N|O|1996-11-10|1996-10-25|1996-12-03|TAKE BACK RETURN|RAIL|ly bold foxes. ironic accoun|
+2144|92|6|1|33|32738.97|0.00|0.07|R|F|1994-04-04|1994-06-20|1994-04-23|NONE|AIR| ironic excuses haggle final dependencies. |
+2144|51|9|2|46|43748.30|0.03|0.08|R|F|1994-04-08|1994-04-29|1994-05-07|COLLECT COD|SHIP| foxes haggle blithel|
+2144|4|9|3|29|26216.00|0.00|0.07|R|F|1994-05-03|1994-05-16|1994-06-01|DELIVER IN PERSON|FOB|ns wake carefully carefully ironic|
+2144|158|9|4|10|10581.50|0.00|0.04|R|F|1994-06-16|1994-05-03|1994-07-05|COLLECT COD|AIR| furiously unusual ideas. carefull|
+2145|78|8|1|13|12714.91|0.04|0.05|A|F|1992-11-12|1992-12-13|1992-12-07|TAKE BACK RETURN|MAIL|alongside of the slyly final|
+2145|154|6|2|6|6324.90|0.05|0.01|A|F|1992-10-10|1992-11-29|1992-10-14|NONE|AIR|s. fluffily express accounts sleep. slyl|
+2146|57|5|1|42|40196.10|0.10|0.01|A|F|1992-09-21|1992-11-02|1992-09-23|NONE|AIR|ns according to the doggedly |
+2146|157|5|2|6|6342.90|0.07|0.05|A|F|1993-01-03|1992-10-24|1993-01-24|DELIVER IN PERSON|RAIL|ing to the requests. dependencies boost |
+2146|25|8|3|14|12950.28|0.03|0.01|R|F|1992-09-16|1992-10-16|1992-09-20|COLLECT COD|SHIP|ecial, express a|
+2146|26|9|4|31|28706.62|0.02|0.00|A|F|1993-01-04|1992-10-24|1993-01-15|DELIVER IN PERSON|TRUCK|lly even deposit|
+2146|169|4|5|28|29936.48|0.02|0.05|R|F|1993-01-03|1992-10-17|1993-01-08|COLLECT COD|MAIL|r accounts sleep furio|
+2146|71|9|6|32|31074.24|0.07|0.03|R|F|1993-01-10|1992-10-19|1993-02-05|COLLECT COD|TRUCK|y regular foxes wake among the final|
+2146|25|6|7|39|36075.78|0.07|0.06|R|F|1993-01-05|1992-11-06|1993-01-14|DELIVER IN PERSON|TRUCK|uickly regular excuses detect. regular c|
+2147|29|8|1|50|46451.00|0.04|0.06|R|F|1992-11-18|1992-11-30|1992-11-30|NONE|RAIL|al accounts. even, even foxes wake|
+2147|101|2|2|4|4004.40|0.01|0.04|A|F|1992-09-27|1992-11-15|1992-10-22|NONE|AIR|mong the blithely special|
+2147|44|7|3|34|32097.36|0.10|0.04|R|F|1992-11-29|1992-11-08|1992-12-22|TAKE BACK RETURN|REG AIR|egular deposits hang car|
+2147|11|8|4|11|10021.11|0.06|0.07|A|F|1992-09-27|1992-11-16|1992-10-16|NONE|AIR| the fluffily|
+2148|116|6|1|21|21338.31|0.09|0.01|R|F|1995-05-28|1995-05-26|1995-06-15|NONE|FOB|deposits ag|
+2149|19|9|1|12|11028.12|0.05|0.07|R|F|1993-06-01|1993-05-06|1993-06-11|TAKE BACK RETURN|TRUCK|riously bl|
+2149|99|10|2|10|9990.90|0.06|0.01|R|F|1993-06-09|1993-04-17|1993-06-16|DELIVER IN PERSON|TRUCK|eposits sleep above|
+2149|49|2|3|47|44604.88|0.00|0.04|R|F|1993-06-27|1993-05-12|1993-07-11|COLLECT COD|AIR|hely final depo|
+2149|129|8|4|18|18524.16|0.06|0.00|A|F|1993-04-05|1993-05-11|1993-04-23|DELIVER IN PERSON|REG AIR|uriously final pac|
+2149|60|5|5|22|21121.32|0.06|0.04|R|F|1993-05-24|1993-04-23|1993-06-20|TAKE BACK RETURN|SHIP|ptotes sleep along the blithely ir|
+2150|78|7|1|26|25429.82|0.00|0.03|A|F|1994-06-21|1994-08-05|1994-06-23|NONE|TRUCK|. always unusual packages|
+2150|18|8|2|29|26622.29|0.04|0.03|A|F|1994-09-02|1994-08-04|1994-10-02|TAKE BACK RETURN|RAIL|y ironic theodolites. foxes ca|
+2150|107|2|3|29|29205.90|0.04|0.08|R|F|1994-06-10|1994-07-31|1994-06-26|COLLECT COD|RAIL|arefully final att|
+2150|54|6|4|39|37207.95|0.05|0.02|R|F|1994-07-31|1994-08-17|1994-08-11|TAKE BACK RETURN|TRUCK|ess accounts nag. unusual asymptotes haggl|
+2150|183|4|5|35|37911.30|0.01|0.01|A|F|1994-09-27|1994-08-17|1994-10-13|COLLECT COD|RAIL|refully pending dependen|
+2150|7|10|6|12|10884.00|0.09|0.03|A|F|1994-08-27|1994-08-22|1994-09-18|COLLECT COD|AIR|press platelets haggle until the slyly fi|
+2151|167|2|1|23|24544.68|0.06|0.02|N|O|1996-11-20|1996-12-17|1996-11-30|DELIVER IN PERSON|AIR| silent dependencies about the slyl|
+2151|15|9|2|29|26535.29|0.00|0.02|N|O|1997-03-04|1996-12-27|1997-03-21|TAKE BACK RETURN|SHIP| bold packages acro|
+2151|165|2|3|49|52192.84|0.07|0.01|N|O|1997-01-20|1997-02-09|1997-02-18|NONE|FOB| packages. f|
+2151|18|5|4|28|25704.28|0.10|0.08|N|O|1996-12-11|1996-12-26|1996-12-12|DELIVER IN PERSON|AIR|y special packages. carefully ironic instru|
+2176|191|4|1|38|41465.22|0.02|0.08|R|F|1992-11-29|1993-01-14|1992-12-22|DELIVER IN PERSON|REG AIR|lithely ironic pinto beans. furious|
+2176|95|8|2|14|13931.26|0.00|0.06|A|F|1992-11-17|1993-01-07|1992-12-03|DELIVER IN PERSON|SHIP|ely ironic platelets |
+2176|160|1|3|25|26504.00|0.02|0.02|R|F|1993-02-23|1993-01-05|1993-03-07|COLLECT COD|RAIL| ruthless deposits according to the ent|
+2176|143|6|4|2|2086.28|0.05|0.06|A|F|1993-02-26|1993-01-08|1993-03-23|DELIVER IN PERSON|AIR|s pinto beans|
+2177|129|10|1|45|46310.40|0.02|0.01|N|O|1997-02-11|1997-02-27|1997-02-17|NONE|SHIP|. theodolites haggle carefu|
+2177|139|5|2|27|28056.51|0.04|0.08|N|O|1997-01-29|1997-03-20|1997-02-04|DELIVER IN PERSON|SHIP|even, regula|
+2177|81|2|3|23|22564.84|0.07|0.05|N|O|1997-01-28|1997-03-02|1997-02-13|DELIVER IN PERSON|AIR|he silent foxes. iro|
+2177|55|3|4|34|32471.70|0.05|0.07|N|O|1997-02-03|1997-04-10|1997-02-21|COLLECT COD|REG AIR|tes are doggedly quickly|
+2177|57|9|5|46|44024.30|0.09|0.05|N|O|1997-05-10|1997-02-23|1997-05-28|COLLECT COD|RAIL|ending asymptotes.|
+2177|122|7|6|11|11243.32|0.02|0.04|N|O|1997-03-20|1997-03-07|1997-04-09|DELIVER IN PERSON|MAIL|gainst the ca|
+2178|157|2|1|15|15857.25|0.10|0.01|N|O|1997-03-27|1997-03-10|1997-04-18|NONE|REG AIR|l accounts. quickly expr|
+2178|16|10|2|27|24732.27|0.01|0.02|N|O|1997-02-26|1997-02-19|1997-03-25|NONE|MAIL| across the ironic reques|
+2178|5|2|3|40|36200.00|0.00|0.03|N|O|1997-03-17|1997-02-09|1997-04-15|COLLECT COD|RAIL|foxes are slowly regularly specia|
+2178|78|6|4|3|2934.21|0.07|0.07|N|O|1997-04-07|1997-01-23|1997-04-18|COLLECT COD|MAIL| permanentl|
+2179|130|9|1|22|22662.86|0.05|0.08|N|O|1996-11-16|1996-11-03|1996-11-25|DELIVER IN PERSON|FOB|lphins cajole acr|
+2179|139|5|2|20|20782.60|0.03|0.01|N|O|1996-09-30|1996-11-10|1996-10-30|NONE|REG AIR|ncies. fin|
+2179|104|9|3|5|5020.50|0.03|0.02|N|O|1996-11-09|1996-10-08|1996-11-11|DELIVER IN PERSON|REG AIR|ts haggle blithely. ironic, careful theodol|
+2179|6|3|4|24|21744.00|0.04|0.04|N|O|1996-10-26|1996-11-05|1996-11-16|COLLECT COD|RAIL| cajole carefully. |
+2179|108|5|5|7|7056.70|0.00|0.02|N|O|1996-10-24|1996-11-14|1996-11-21|TAKE BACK RETURN|RAIL|gular dependencies. ironic packages haggle|
+2180|16|3|1|31|28396.31|0.06|0.04|N|O|1996-10-20|1996-11-21|1996-11-06|COLLECT COD|REG AIR|n requests are furiously at the quickly|
+2180|193|7|2|39|42634.41|0.01|0.00|N|O|1997-01-03|1996-10-29|1997-01-25|NONE|RAIL|ep furiously furiously final request|
+2180|197|9|3|24|26332.56|0.03|0.00|N|O|1997-01-03|1996-10-24|1997-01-19|NONE|SHIP|uriously f|
+2180|111|5|4|47|47522.17|0.07|0.02|N|O|1996-09-23|1996-12-08|1996-10-12|NONE|FOB|pending, regular ideas. iron|
+2180|143|2|5|23|23992.22|0.02|0.06|N|O|1996-11-08|1996-10-25|1996-11-28|NONE|TRUCK|ggle alongside of the fluffily speci|
+2180|55|6|6|48|45842.40|0.09|0.03|N|O|1996-12-30|1996-11-22|1997-01-16|DELIVER IN PERSON|RAIL|nic instructions haggle careful|
+2181|178|9|1|4|4312.68|0.05|0.04|N|O|1995-09-25|1995-11-12|1995-09-28|COLLECT COD|FOB|tes. slyly silent packages use along th|
+2181|88|9|2|46|45451.68|0.00|0.02|N|O|1995-11-28|1995-10-17|1995-12-26|COLLECT COD|AIR|osits. final packages sleep|
+2181|91|2|3|15|14866.35|0.08|0.05|N|O|1995-10-05|1995-10-27|1995-11-03|DELIVER IN PERSON|FOB|e above the fluffily regul|
+2181|55|10|4|28|26741.40|0.04|0.05|N|O|1995-12-21|1995-10-23|1996-01-04|TAKE BACK RETURN|AIR|s excuses sleep car|
+2181|96|7|5|9|8964.81|0.06|0.07|N|O|1996-01-05|1995-12-05|1996-01-08|COLLECT COD|TRUCK|ward the quietly even requests. ir|
+2182|132|8|1|27|27867.51|0.02|0.07|R|F|1994-05-10|1994-07-04|1994-06-04|DELIVER IN PERSON|SHIP|en platele|
+2182|190|1|2|3|3270.57|0.05|0.03|R|F|1994-04-20|1994-07-04|1994-04-24|TAKE BACK RETURN|SHIP|y bold theodolites wi|
+2182|94|6|3|34|33799.06|0.02|0.03|R|F|1994-05-28|1994-06-02|1994-06-10|COLLECT COD|MAIL| slow tithes. ironi|
+2182|7|4|4|12|10884.00|0.04|0.07|A|F|1994-05-08|1994-06-02|1994-05-09|COLLECT COD|REG AIR|ments are fu|
+2182|179|9|5|37|39929.29|0.06|0.02|A|F|1994-04-08|1994-06-29|1994-04-18|TAKE BACK RETURN|TRUCK|ges. blithely ironic|
+2183|71|1|1|29|28161.03|0.05|0.01|N|O|1996-07-21|1996-08-24|1996-08-15|TAKE BACK RETURN|RAIL|ly unusual deposits sleep carefully|
+2183|52|3|2|25|23801.25|0.06|0.02|N|O|1996-07-06|1996-08-21|1996-08-05|NONE|RAIL|he quickly f|
+2208|58|3|1|48|45986.40|0.08|0.07|A|F|1995-05-13|1995-06-30|1995-05-20|COLLECT COD|MAIL|sits. idly permanent request|
+2208|97|1|2|11|10967.99|0.08|0.01|A|F|1995-05-06|1995-07-19|1995-05-22|COLLECT COD|TRUCK|ding waters lose. furiously regu|
+2208|74|4|3|41|39936.87|0.08|0.02|N|O|1995-08-18|1995-06-19|1995-09-05|COLLECT COD|RAIL|nd the furious, express dependencies.|
+2208|43|2|4|50|47152.00|0.07|0.07|N|F|1995-06-11|1995-05-31|1995-06-29|TAKE BACK RETURN|FOB|al foxes will hav|
+2208|30|5|5|43|39991.29|0.03|0.06|A|F|1995-05-10|1995-06-02|1995-06-09|TAKE BACK RETURN|MAIL|es. accounts cajole. fi|
+2208|167|2|6|18|19208.88|0.02|0.08|R|F|1995-06-06|1995-06-10|1995-06-11|TAKE BACK RETURN|TRUCK|packages are quickly bold de|
+2208|7|2|7|45|40815.00|0.00|0.08|A|F|1995-05-05|1995-06-10|1995-05-11|NONE|SHIP|e fluffily regular theodolites caj|
+2209|23|2|1|40|36920.80|0.05|0.01|R|F|1992-11-01|1992-09-25|1992-11-08|DELIVER IN PERSON|SHIP|ully special sheaves serve|
+2209|103|4|2|10|10031.00|0.00|0.02|R|F|1992-09-02|1992-09-24|1992-09-21|DELIVER IN PERSON|AIR|players. carefully reg|
+2209|64|1|3|11|10604.66|0.01|0.01|A|F|1992-07-12|1992-08-24|1992-08-10|DELIVER IN PERSON|REG AIR|express, regular pinto be|
+2209|181|2|4|39|42166.02|0.08|0.07|R|F|1992-11-04|1992-09-02|1992-11-11|TAKE BACK RETURN|MAIL|ly around the final packages. deposits ca|
+2209|124|7|5|24|24578.88|0.08|0.06|R|F|1992-08-09|1992-08-18|1992-08-25|COLLECT COD|AIR| along the bol|
+2209|178|7|6|7|7547.19|0.09|0.07|A|F|1992-08-18|1992-09-09|1992-09-12|DELIVER IN PERSON|AIR| quickly regular pack|
+2210|78|7|1|36|35210.52|0.10|0.00|A|F|1992-03-04|1992-03-24|1992-03-21|DELIVER IN PERSON|AIR| requests wake enticingly final|
+2211|48|1|1|25|23701.00|0.04|0.01|A|F|1994-10-09|1994-08-04|1994-11-03|TAKE BACK RETURN|RAIL|deas. carefully special theodolites along|
+2211|140|6|2|40|41605.60|0.09|0.06|A|F|1994-09-30|1994-09-10|1994-10-26|NONE|MAIL|posits among the express dolphins|
+2211|160|2|3|25|26504.00|0.00|0.07|A|F|1994-08-13|1994-08-17|1994-08-16|NONE|AIR|ly regular, express|
+2211|85|6|4|23|22656.84|0.03|0.02|R|F|1994-10-05|1994-09-13|1994-10-17|DELIVER IN PERSON|AIR|ependencies |
+2211|135|1|5|3|3105.39|0.02|0.04|A|F|1994-08-28|1994-09-10|1994-09-06|TAKE BACK RETURN|SHIP|pendencies after the regular f|
+2211|187|8|6|18|19569.24|0.05|0.08|A|F|1994-08-31|1994-09-07|1994-09-22|NONE|TRUCK|c grouches. slyly express pinto |
+2211|79|9|7|3|2937.21|0.06|0.05|R|F|1994-09-21|1994-08-10|1994-10-19|TAKE BACK RETURN|RAIL|y slyly final|
+2212|71|10|1|18|17479.26|0.07|0.06|R|F|1994-06-22|1994-06-18|1994-06-25|TAKE BACK RETURN|FOB| cajole. final, pending ideas should are bl|
+2213|118|8|1|20|20362.20|0.01|0.00|A|F|1993-01-21|1993-04-14|1993-01-29|COLLECT COD|REG AIR|iously express accounts; |
+2213|60|1|2|4|3840.24|0.09|0.05|R|F|1993-04-15|1993-04-15|1993-05-05|COLLECT COD|SHIP| affix carefully furiously |
+2213|70|5|3|1|970.07|0.05|0.05|A|F|1993-04-25|1993-04-06|1993-04-28|TAKE BACK RETURN|AIR|s along the ironic reques|
+2213|174|3|4|39|41892.63|0.09|0.05|A|F|1993-05-12|1993-04-07|1993-05-23|TAKE BACK RETURN|SHIP|the blithely |
+2213|38|9|5|43|40335.29|0.04|0.03|A|F|1993-04-18|1993-03-11|1993-05-11|TAKE BACK RETURN|RAIL|r packages are along the carefully bol|
+2213|48|5|6|41|38869.64|0.01|0.00|R|F|1993-01-31|1993-03-31|1993-02-28|COLLECT COD|FOB| carefully pend|
+2213|64|9|7|3|2892.18|0.02|0.04|A|F|1993-03-09|1993-03-17|1993-04-07|TAKE BACK RETURN|AIR|o wake. ironic platel|
+2214|76|5|1|27|26353.89|0.04|0.04|N|O|1998-05-31|1998-06-07|1998-06-19|DELIVER IN PERSON|REG AIR|x fluffily along the even packages-- |
+2214|194|5|2|50|54709.50|0.00|0.02|N|O|1998-07-06|1998-06-16|1998-07-16|TAKE BACK RETURN|MAIL|accounts. blith|
+2214|113|7|3|42|42550.62|0.04|0.08|N|O|1998-05-26|1998-07-13|1998-06-22|COLLECT COD|FOB|ons. deposi|
+2214|196|9|4|22|24116.18|0.01|0.01|N|O|1998-05-30|1998-07-02|1998-06-09|DELIVER IN PERSON|RAIL|t the blithely|
+2215|73|1|1|33|32111.31|0.00|0.00|N|O|1996-07-19|1996-08-10|1996-07-30|COLLECT COD|RAIL|dolites cajole b|
+2215|33|9|2|30|27990.90|0.01|0.00|N|O|1996-08-15|1996-09-10|1996-08-25|NONE|FOB|ckages caj|
+2215|57|5|3|30|28711.50|0.07|0.03|N|O|1996-09-09|1996-07-20|1996-09-28|COLLECT COD|TRUCK|against the carefu|
+2215|146|3|4|20|20922.80|0.02|0.02|N|O|1996-09-09|1996-08-10|1996-09-19|NONE|MAIL| unusual deposits haggle carefully. ide|
+2240|164|3|1|6|6384.96|0.01|0.00|A|F|1992-06-23|1992-05-17|1992-07-20|COLLECT COD|AIR|ymptotes boost. furiously bold p|
+2240|28|1|2|37|34336.74|0.03|0.07|R|F|1992-03-16|1992-05-31|1992-04-05|COLLECT COD|FOB| quickly after the packages? blithely si|
+2240|53|5|3|39|37168.95|0.08|0.06|A|F|1992-05-22|1992-05-10|1992-06-08|NONE|FOB|y orbits. final depos|
+2240|86|7|4|10|9860.80|0.09|0.00|A|F|1992-05-25|1992-04-14|1992-06-23|DELIVER IN PERSON|REG AIR|are across the ironic packages.|
+2240|161|10|5|29|30773.64|0.02|0.06|A|F|1992-03-29|1992-05-08|1992-04-09|COLLECT COD|MAIL|lyly even ideas w|
+2240|81|2|6|32|31394.56|0.06|0.06|R|F|1992-04-11|1992-04-18|1992-04-22|NONE|MAIL|ss thinly deposits. blithely bold package|
+2240|78|7|7|24|23473.68|0.04|0.05|R|F|1992-05-13|1992-04-09|1992-05-14|DELIVER IN PERSON|FOB|ng the silent accounts. slyly ironic t|
+2241|5|6|1|25|22625.00|0.00|0.08|R|F|1993-08-11|1993-07-23|1993-09-01|DELIVER IN PERSON|MAIL| final deposits use fluffily. even f|
+2241|195|8|2|38|41617.22|0.04|0.06|A|F|1993-08-04|1993-07-31|1993-08-06|TAKE BACK RETURN|TRUCK| silent, unusual d|
+2241|97|10|3|48|47860.32|0.08|0.04|A|F|1993-05-14|1993-07-30|1993-05-26|TAKE BACK RETURN|RAIL|ss accounts engage furiously. slyly even re|
+2241|167|4|4|19|20276.04|0.10|0.00|A|F|1993-06-01|1993-08-05|1993-06-07|TAKE BACK RETURN|TRUCK| are furiously quickl|
+2241|82|3|5|2|1964.16|0.04|0.03|A|F|1993-08-16|1993-08-02|1993-08-24|NONE|REG AIR|, express deposits. pear|
+2241|116|3|6|22|22354.42|0.02|0.08|R|F|1993-08-13|1993-06-15|1993-08-16|DELIVER IN PERSON|TRUCK|, ironic depen|
+2241|142|3|7|9|9379.26|0.09|0.03|A|F|1993-05-14|1993-07-12|1993-05-29|NONE|AIR|lyly final |
+2242|123|4|1|15|15346.80|0.09|0.08|N|O|1997-08-04|1997-09-21|1997-08-11|COLLECT COD|FOB|its. carefully express packages cajole. bli|
+2243|127|8|1|10|10271.20|0.04|0.06|N|O|1995-07-26|1995-07-18|1995-08-03|NONE|RAIL|express, daring foxes affix fur|
+2244|51|6|1|3|2853.15|0.02|0.02|A|F|1993-04-30|1993-03-15|1993-05-19|TAKE BACK RETURN|FOB| beans for the regular platel|
+2244|193|6|2|16|17491.04|0.01|0.06|R|F|1993-02-12|1993-03-09|1993-02-28|COLLECT COD|FOB|rate around the reques|
+2245|76|7|1|44|42947.08|0.03|0.03|A|F|1993-06-12|1993-06-10|1993-06-16|NONE|TRUCK|refully even sheaves|
+2245|74|3|2|28|27273.96|0.05|0.03|R|F|1993-08-19|1993-07-27|1993-09-04|COLLECT COD|TRUCK|e requests sleep furiou|
+2245|86|7|3|33|32540.64|0.03|0.01|R|F|1993-06-26|1993-06-11|1993-07-17|TAKE BACK RETURN|AIR|ing to the carefully ruthless accounts|
+2245|189|10|4|14|15248.52|0.02|0.04|R|F|1993-05-06|1993-07-21|1993-05-19|DELIVER IN PERSON|RAIL|nts. always unusual dep|
+2245|80|8|5|33|32342.64|0.03|0.07|R|F|1993-06-16|1993-06-05|1993-07-07|NONE|MAIL| across the express reques|
+2246|53|4|1|22|20967.10|0.02|0.01|N|O|1996-07-25|1996-08-03|1996-08-24|DELIVER IN PERSON|SHIP|ructions wake carefully fina|
+2246|104|5|2|43|43176.30|0.07|0.06|N|O|1996-08-25|1996-08-23|1996-09-19|DELIVER IN PERSON|AIR|ainst the ironic theodolites haggle fi|
+2246|18|8|3|11|10098.11|0.10|0.00|N|O|1996-06-21|1996-07-24|1996-07-18|TAKE BACK RETURN|TRUCK|quests alongside o|
+2246|163|8|4|13|13821.08|0.08|0.05|N|O|1996-09-15|1996-07-21|1996-10-08|DELIVER IN PERSON|AIR|equests. fluffily special epitaphs use|
+2247|172|2|1|12|12866.04|0.02|0.07|A|F|1992-09-06|1992-09-18|1992-09-26|NONE|MAIL|final accounts. requests across the furiou|
+2272|90|1|1|18|17821.62|0.04|0.00|R|F|1993-08-01|1993-07-06|1993-08-25|NONE|MAIL|ons along the blithely e|
+2272|34|10|2|40|37361.20|0.07|0.00|A|F|1993-04-25|1993-07-12|1993-05-15|DELIVER IN PERSON|FOB|lithely ir|
+2272|56|4|3|36|34417.80|0.03|0.02|A|F|1993-05-25|1993-05-23|1993-06-09|TAKE BACK RETURN|RAIL|about the ironic packages; quickly iron|
+2272|138|4|4|30|31143.90|0.09|0.07|A|F|1993-07-27|1993-05-15|1993-08-13|NONE|RAIL|quests at the foxes haggle evenly pack|
+2272|76|4|5|12|11712.84|0.03|0.03|A|F|1993-04-19|1993-05-14|1993-04-23|NONE|RAIL| accounts cajole. quickly b|
+2273|184|5|1|34|36862.12|0.02|0.03|N|O|1997-01-08|1997-02-02|1997-01-23|COLLECT COD|MAIL| furiously carefully bold de|
+2273|85|6|2|35|34477.80|0.00|0.05|N|O|1997-01-02|1997-01-19|1997-01-14|NONE|REG AIR|arefully f|
+2273|95|8|3|8|7960.72|0.00|0.04|N|O|1996-12-15|1997-02-27|1997-01-10|NONE|FOB|dependencies. slyly ir|
+2273|161|6|4|20|21223.20|0.06|0.04|N|O|1997-03-05|1997-02-25|1997-04-01|NONE|RAIL|cuses. quickly enticing requests wake |
+2273|162|7|5|18|19118.88|0.07|0.00|N|O|1996-12-16|1997-01-21|1997-01-03|COLLECT COD|TRUCK| beans. doggedly final packages wake|
+2273|155|7|6|16|16882.40|0.10|0.03|N|O|1997-01-10|1997-02-03|1997-02-01|TAKE BACK RETURN|RAIL|furiously above the ironic requests. |
+2273|20|1|7|7|6440.14|0.05|0.05|N|O|1997-02-19|1997-01-22|1997-02-21|TAKE BACK RETURN|TRUCK|ts. furiou|
+2274|12|6|1|18|16416.18|0.04|0.03|R|F|1993-09-06|1993-12-03|1993-09-22|COLLECT COD|SHIP|usly final re|
+2274|111|8|2|23|23255.53|0.04|0.03|R|F|1993-10-28|1993-11-03|1993-11-05|NONE|MAIL|kly special warhorse|
+2274|129|10|3|18|18524.16|0.03|0.06|R|F|1993-09-28|1993-11-22|1993-10-12|DELIVER IN PERSON|SHIP| express packages. even accounts hagg|
+2275|34|5|1|30|28020.90|0.08|0.05|R|F|1993-01-10|1992-11-21|1993-01-22|NONE|REG AIR|re slyly slyly special idea|
+2275|91|4|2|11|10901.99|0.08|0.03|A|F|1993-01-16|1992-12-10|1993-01-25|COLLECT COD|REG AIR|ost across the never express instruction|
+2276|119|9|1|5|5095.55|0.07|0.08|N|O|1996-05-09|1996-06-18|1996-05-13|DELIVER IN PERSON|FOB|ias instea|
+2276|135|1|2|13|13456.69|0.08|0.04|N|O|1996-07-24|1996-06-18|1996-08-16|COLLECT COD|RAIL|arefully ironic foxes cajole q|
+2276|171|2|3|27|28921.59|0.07|0.08|N|O|1996-07-30|1996-06-10|1996-07-31|DELIVER IN PERSON|RAIL|the carefully unusual accoun|
+2276|109|6|4|38|38345.80|0.06|0.03|N|O|1996-07-07|1996-06-28|1996-07-17|COLLECT COD|RAIL|ans. pinto beans boost c|
+2276|153|5|5|50|52657.50|0.03|0.05|N|O|1996-07-13|1996-06-25|1996-07-22|DELIVER IN PERSON|REG AIR| accounts dete|
+2276|6|9|6|4|3624.00|0.10|0.03|N|O|1996-07-05|1996-06-30|1996-08-04|COLLECT COD|FOB|s. deposits |
+2277|137|8|1|38|39410.94|0.03|0.07|R|F|1995-04-23|1995-03-25|1995-05-20|TAKE BACK RETURN|TRUCK|fully bold|
+2277|8|1|2|2|1816.00|0.10|0.08|A|F|1995-02-01|1995-02-04|1995-03-02|TAKE BACK RETURN|AIR|endencies sleep idly pending p|
+2277|198|10|3|4|4392.76|0.05|0.06|R|F|1995-04-27|1995-03-16|1995-04-29|TAKE BACK RETURN|SHIP|. quickly unusual deposi|
+2277|159|4|4|31|32833.65|0.02|0.00|R|F|1995-03-07|1995-03-19|1995-03-26|TAKE BACK RETURN|MAIL|ic instructions detect ru|
+2278|45|2|1|36|34021.44|0.04|0.05|N|O|1998-06-04|1998-06-06|1998-06-30|NONE|TRUCK|y ironic pinto beans br|
+2278|45|2|2|50|47252.00|0.02|0.00|N|O|1998-08-09|1998-07-08|1998-09-05|DELIVER IN PERSON|RAIL|into beans. blit|
+2278|97|9|3|22|21935.98|0.03|0.00|N|O|1998-05-15|1998-07-14|1998-06-04|TAKE BACK RETURN|REG AIR|ep regular accounts. blithely even|
+2279|14|5|1|12|10968.12|0.07|0.08|A|F|1993-05-10|1993-03-25|1993-06-02|COLLECT COD|REG AIR|lets across the excuses nag quickl|
+2279|41|2|2|38|35759.52|0.08|0.07|R|F|1993-06-09|1993-04-06|1993-06-26|COLLECT COD|TRUCK|s above the furiously express dep|
+2279|4|7|3|3|2712.00|0.09|0.04|A|F|1993-05-31|1993-05-07|1993-06-05|COLLECT COD|REG AIR|ing foxes above the even accounts use slyly|
+2279|52|4|4|42|39986.10|0.02|0.00|R|F|1993-02-28|1993-04-25|1993-03-02|TAKE BACK RETURN|REG AIR| above the furiously ironic deposits. |
+2279|169|8|5|9|9622.44|0.05|0.04|R|F|1993-05-21|1993-03-29|1993-06-17|DELIVER IN PERSON|MAIL|ns cajole after the final platelets. s|
+2279|147|10|6|12|12565.68|0.02|0.00|R|F|1993-05-04|1993-04-26|1993-05-28|DELIVER IN PERSON|FOB|ccounts. slyl|
+2279|119|9|7|32|32611.52|0.05|0.05|A|F|1993-04-20|1993-05-22|1993-05-18|DELIVER IN PERSON|RAIL|re quickly. furiously ironic ide|
+2304|200|2|1|42|46208.40|0.00|0.01|A|F|1994-01-20|1994-03-04|1994-02-05|COLLECT COD|RAIL|quests are blithely alongside of|
+2304|19|9|2|48|44112.48|0.00|0.00|R|F|1994-02-12|1994-02-16|1994-03-10|COLLECT COD|REG AIR| deposits cajole blithely e|
+2304|48|9|3|3|2844.12|0.00|0.05|R|F|1994-03-19|1994-03-04|1994-03-20|DELIVER IN PERSON|AIR|l excuses after the ev|
+2305|174|4|1|3|3222.51|0.00|0.01|A|F|1993-03-24|1993-04-05|1993-03-29|NONE|AIR|kages haggle quickly across the blithely |
+2305|60|8|2|39|37442.34|0.07|0.00|R|F|1993-04-16|1993-04-17|1993-04-22|COLLECT COD|MAIL|ms after the foxes |
+2305|102|3|3|32|32067.20|0.03|0.06|A|F|1993-04-02|1993-03-18|1993-04-03|NONE|AIR| haggle caref|
+2305|112|3|4|17|17205.87|0.00|0.05|A|F|1993-02-21|1993-03-30|1993-03-19|TAKE BACK RETURN|MAIL| carefully alongside of |
+2305|155|7|5|26|27433.90|0.06|0.07|A|F|1993-05-14|1993-02-28|1993-06-04|NONE|SHIP|arefully final theodo|
+2305|51|3|6|7|6657.35|0.06|0.00|R|F|1993-05-15|1993-04-25|1993-06-09|DELIVER IN PERSON|RAIL|gular deposits boost about the foxe|
+2306|196|9|1|50|54809.50|0.09|0.01|N|O|1995-07-27|1995-09-26|1995-08-06|DELIVER IN PERSON|FOB|y quickly |
+2306|149|2|2|39|40916.46|0.04|0.00|N|O|1995-09-07|1995-09-13|1995-10-03|COLLECT COD|SHIP|f the slyly unusual accounts. furiousl|
+2306|178|6|3|35|37735.95|0.01|0.07|N|O|1995-08-18|1995-08-30|1995-08-20|TAKE BACK RETURN|RAIL|raids along the furiously unusual asympto|
+2306|119|3|4|21|21401.31|0.06|0.01|N|O|1995-10-07|1995-09-18|1995-10-17|COLLECT COD|MAIL| ironic pinto |
+2306|142|9|5|42|43769.88|0.04|0.07|N|O|1995-09-05|1995-08-25|1995-09-28|COLLECT COD|MAIL|furiously final acco|
+2306|124|5|6|29|29699.48|0.00|0.03|N|O|1995-11-01|1995-09-01|1995-11-22|DELIVER IN PERSON|REG AIR|uld have to mold. s|
+2306|176|4|7|19|20447.23|0.07|0.01|N|O|1995-11-17|1995-09-06|1995-11-30|DELIVER IN PERSON|AIR|tainments nag furiously carefull|
+2307|142|9|1|24|25011.36|0.10|0.05|R|F|1993-10-07|1993-08-05|1993-10-20|COLLECT COD|AIR|stealthily special packages nag a|
+2307|140|6|2|2|2080.28|0.01|0.00|A|F|1993-09-21|1993-08-22|1993-10-03|COLLECT COD|SHIP|ously. furiously furious requ|
+2307|34|10|3|7|6538.21|0.07|0.04|R|F|1993-08-03|1993-09-04|1993-08-28|DELIVER IN PERSON|AIR|ven instructions wake fluffily |
+2307|165|6|4|19|20238.04|0.08|0.06|R|F|1993-10-23|1993-09-09|1993-11-09|TAKE BACK RETURN|TRUCK|olites haggle furiously around the |
+2307|143|4|5|7|7301.98|0.01|0.06|R|F|1993-09-01|1993-08-08|1993-09-29|NONE|AIR| packages cajo|
+2308|118|9|1|24|24434.64|0.06|0.04|R|F|1993-02-23|1992-12-24|1993-03-10|NONE|MAIL|ts sleep. busy excuses along the s|
+2308|56|1|2|36|34417.80|0.05|0.06|A|F|1992-11-11|1992-11-27|1992-11-23|NONE|MAIL|ong the pending hockey players. blithe|
+2309|170|7|1|14|14982.38|0.10|0.03|N|O|1996-01-01|1995-10-22|1996-01-23|NONE|AIR|asymptotes. furiously pending acco|
+2309|169|8|2|1|1069.16|0.01|0.05|N|O|1995-12-08|1995-11-03|1995-12-31|COLLECT COD|RAIL|eposits alongside of the final re|
+2309|15|2|3|5|4575.05|0.01|0.00|N|O|1995-12-10|1995-10-29|1996-01-06|TAKE BACK RETURN|SHIP|s. requests wake blithely specia|
+2309|139|10|4|46|47799.98|0.08|0.04|N|O|1995-10-02|1995-10-30|1995-10-30|NONE|REG AIR|sly according to the carefully |
+2309|137|3|5|9|9334.17|0.00|0.07|N|O|1995-12-21|1995-10-10|1996-01-20|COLLECT COD|AIR|ding, unusual instructions. dep|
+2309|195|8|6|21|22998.99|0.09|0.00|N|O|1995-11-05|1995-11-07|1995-11-22|NONE|AIR|unts around the dolphins ar|
+2309|138|4|7|48|49830.24|0.03|0.05|N|O|1995-10-21|1995-11-21|1995-11-09|NONE|MAIL|ccounts. id|
+2310|58|6|1|36|34489.80|0.03|0.03|N|O|1996-10-09|1996-10-28|1996-10-29|TAKE BACK RETURN|RAIL|iously against the slyly special accounts|
+2310|171|2|2|6|6427.02|0.07|0.01|N|O|1996-11-08|1996-12-09|1996-12-07|COLLECT COD|REG AIR|e slyly about the quickly ironic theodo|
+2310|42|1|3|48|45217.92|0.08|0.02|N|O|1996-10-04|1996-11-20|1996-10-25|TAKE BACK RETURN|FOB|ep slyly alongside of the |
+2311|141|8|1|18|18740.52|0.01|0.01|N|F|1995-06-11|1995-06-18|1995-07-02|NONE|FOB| fluffily even patterns haggle blithely. re|
+2311|122|1|2|49|50083.88|0.09|0.02|R|F|1995-05-14|1995-07-11|1995-05-20|COLLECT COD|FOB|ideas sleep|
+2311|54|5|3|15|14310.75|0.08|0.04|N|O|1995-06-23|1995-06-06|1995-07-09|COLLECT COD|AIR|ve the blithely pending accounts. furio|
+2311|90|1|4|42|41583.78|0.01|0.06|R|F|1995-06-03|1995-06-27|1995-06-11|DELIVER IN PERSON|MAIL|gle furiously. bold |
+2311|47|10|5|1|947.04|0.05|0.02|A|F|1995-06-07|1995-06-20|1995-06-10|NONE|AIR|ptotes. furiously regular theodolite|
+2311|12|9|6|32|29184.32|0.01|0.03|N|O|1995-07-19|1995-06-26|1995-07-26|NONE|RAIL|sts along the slyly|
+2336|193|5|1|20|21863.80|0.01|0.03|N|O|1996-03-12|1996-02-25|1996-03-18|NONE|REG AIR|across the fi|
+2337|45|2|1|49|46306.96|0.06|0.05|N|O|1997-08-08|1997-08-15|1997-08-31|TAKE BACK RETURN|FOB| along the packages. furiously p|
+2338|52|7|1|30|28561.50|0.07|0.06|N|O|1997-12-10|1997-10-15|1997-12-11|TAKE BACK RETURN|REG AIR|ould have to nag quickly|
+2339|192|3|1|22|24028.18|0.03|0.03|A|F|1994-01-06|1994-03-06|1994-01-10|NONE|FOB| furiously above |
+2339|30|5|2|28|26040.84|0.00|0.00|R|F|1994-01-25|1994-01-22|1994-01-28|DELIVER IN PERSON|RAIL|e bold, even packag|
+2339|117|4|3|13|13222.43|0.06|0.08|R|F|1994-03-10|1994-02-18|1994-03-20|TAKE BACK RETURN|REG AIR|ges. blithely special depend|
+2340|138|4|1|9|9343.17|0.08|0.02|N|O|1996-05-01|1996-02-24|1996-05-16|COLLECT COD|RAIL|. carefully ironic|
+2340|193|5|2|21|22956.99|0.06|0.02|N|O|1996-01-17|1996-03-04|1996-01-29|DELIVER IN PERSON|SHIP| asymptotes. unusual theo|
+2341|47|10|1|12|11364.48|0.08|0.03|R|F|1993-06-06|1993-07-08|1993-06-17|DELIVER IN PERSON|FOB|. quickly final deposits sl|
+2341|71|10|2|37|35929.59|0.07|0.08|A|F|1993-09-23|1993-07-25|1993-10-14|DELIVER IN PERSON|RAIL|was blithel|
+2341|195|8|3|8|8761.52|0.03|0.07|R|F|1993-06-08|1993-07-09|1993-06-10|COLLECT COD|FOB|ns affix above the iron|
+2342|42|1|1|12|11304.48|0.00|0.08|N|O|1996-07-31|1996-07-26|1996-08-14|NONE|TRUCK|print blithely even deposits. carefull|
+2342|117|1|2|24|24410.64|0.10|0.06|N|O|1996-09-30|1996-07-22|1996-10-28|TAKE BACK RETURN|AIR|nstructions c|
+2342|170|1|3|50|53508.50|0.10|0.01|N|O|1996-08-28|1996-07-18|1996-09-22|COLLECT COD|RAIL|cial asymptotes pr|
+2342|36|7|4|1|936.03|0.04|0.06|N|O|1996-08-31|1996-08-09|1996-09-07|DELIVER IN PERSON|REG AIR|ffily. unusual pinto beans wake c|
+2342|27|2|5|22|20394.44|0.08|0.01|N|O|1996-08-10|1996-08-02|1996-08-31|DELIVER IN PERSON|AIR|s. ironic |
+2343|110|1|1|27|27272.97|0.00|0.00|N|O|1995-11-10|1995-11-17|1995-12-10|TAKE BACK RETURN|SHIP|old theodolites.|
+2343|66|1|2|35|33812.10|0.03|0.06|N|O|1995-10-24|1995-11-09|1995-10-26|COLLECT COD|TRUCK|ges haggle furiously carefully regular req|
+2343|179|7|3|21|22662.57|0.00|0.03|N|O|1995-09-07|1995-10-26|1995-10-07|TAKE BACK RETURN|RAIL|osits. unusual theodolites boost furio|
+2368|152|3|1|16|16834.40|0.04|0.03|R|F|1993-10-31|1993-10-22|1993-11-06|NONE|REG AIR|telets wake carefully iro|
+2368|14|5|2|32|29248.32|0.03|0.00|R|F|1993-09-23|1993-10-07|1993-09-27|COLLECT COD|TRUCK|gular courts use blithely around the|
+2368|149|6|3|39|40916.46|0.08|0.03|R|F|1993-09-03|1993-09-20|1993-09-28|COLLECT COD|RAIL|ng the doggedly ironic requests are blithe|
+2368|156|8|4|17|17954.55|0.10|0.08|A|F|1993-10-03|1993-09-27|1993-10-05|NONE|FOB|fily. slyly final ideas alongside o|
+2369|24|7|1|30|27720.60|0.05|0.04|N|O|1997-04-23|1997-02-12|1997-05-21|COLLECT COD|REG AIR|pecial deposits sleep. blithely unusual w|
+2369|169|10|2|47|50250.52|0.10|0.02|N|O|1997-01-02|1997-02-18|1997-01-13|COLLECT COD|RAIL| to the regular dep|
+2370|46|3|1|3|2838.12|0.03|0.07|R|F|1994-03-24|1994-03-26|1994-04-15|COLLECT COD|SHIP|ly regular Tiresia|
+2370|2|5|2|24|21648.00|0.00|0.05|A|F|1994-05-15|1994-04-09|1994-06-12|NONE|REG AIR|final depen|
+2370|61|2|3|32|30753.92|0.05|0.02|A|F|1994-04-24|1994-03-03|1994-05-15|DELIVER IN PERSON|MAIL|ies since the final deposits|
+2370|6|3|4|21|19026.00|0.04|0.01|R|F|1994-02-01|1994-02-19|1994-02-09|TAKE BACK RETURN|MAIL|ecial dependencies must have to |
+2371|159|4|1|37|39188.55|0.05|0.05|N|O|1998-02-11|1998-03-24|1998-02-27|DELIVER IN PERSON|TRUCK|s boost fluffil|
+2371|35|1|2|21|19635.63|0.00|0.05|N|O|1998-04-14|1998-02-14|1998-04-18|COLLECT COD|AIR|gle furiously regu|
+2371|101|4|3|11|11012.10|0.05|0.02|N|O|1998-02-25|1998-04-06|1998-03-23|TAKE BACK RETURN|TRUCK|requests. regular pinto beans wake. car|
+2371|43|6|4|33|31120.32|0.05|0.08|N|O|1998-03-30|1998-02-06|1998-04-05|DELIVER IN PERSON|AIR|deas are. express r|
+2371|165|2|5|22|23433.52|0.02|0.05|N|O|1998-03-26|1998-03-19|1998-04-16|DELIVER IN PERSON|REG AIR|y daring accounts. regular ins|
+2371|86|7|6|39|38457.12|0.05|0.03|N|O|1998-04-01|1998-03-13|1998-04-27|NONE|REG AIR|tructions. regular, stealthy packages wak|
+2371|36|2|7|32|29952.96|0.07|0.07|N|O|1998-02-15|1998-04-03|1998-02-23|NONE|REG AIR|the ruthless accounts. |
+2372|43|4|1|42|39607.68|0.08|0.02|N|O|1998-01-04|1998-01-02|1998-02-02|COLLECT COD|REG AIR|lar packages. regular|
+2372|3|10|2|17|15351.00|0.07|0.01|N|O|1997-12-17|1998-01-17|1997-12-25|NONE|RAIL|xcuses. slyly ironic theod|
+2372|164|1|3|12|12769.92|0.04|0.04|N|O|1998-03-21|1997-12-21|1998-04-12|DELIVER IN PERSON|SHIP|lyly according to|
+2372|122|1|4|4|4088.48|0.00|0.07|N|O|1997-12-14|1997-12-28|1997-12-16|TAKE BACK RETURN|REG AIR|e carefully blithely even epitaphs. r|
+2372|20|7|5|5|4600.10|0.02|0.04|N|O|1998-02-08|1998-01-18|1998-03-02|TAKE BACK RETURN|RAIL|ets against the |
+2372|189|10|6|11|11980.98|0.02|0.06|N|O|1998-02-14|1998-01-18|1998-03-10|TAKE BACK RETURN|FOB| silent, pending de|
+2372|57|8|7|19|18183.95|0.01|0.06|N|O|1997-12-26|1998-02-19|1998-01-02|COLLECT COD|SHIP| beans haggle sometimes|
+2373|191|5|1|17|18550.23|0.02|0.01|R|F|1994-03-29|1994-05-19|1994-04-20|COLLECT COD|AIR|auternes. blithely even pinto bea|
+2373|136|2|2|3|3108.39|0.08|0.08|R|F|1994-05-15|1994-06-10|1994-06-04|COLLECT COD|TRUCK|dependencies wake ironical|
+2373|141|8|3|29|30193.06|0.05|0.02|A|F|1994-06-01|1994-05-14|1994-06-17|NONE|TRUCK|yly silent ideas affix furiousl|
+2373|91|5|4|5|4955.45|0.10|0.01|R|F|1994-06-02|1994-05-03|1994-06-21|NONE|REG AIR|uffily blithely ironic requests|
+2374|118|2|1|41|41742.51|0.07|0.00|A|F|1994-01-27|1993-12-11|1994-02-12|TAKE BACK RETURN|RAIL|heodolites. requests|
+2374|160|2|2|24|25443.84|0.07|0.08|A|F|1994-02-02|1994-01-12|1994-02-04|DELIVER IN PERSON|TRUCK|. requests are above t|
+2374|61|8|3|2|1922.12|0.06|0.02|R|F|1993-12-30|1994-01-24|1994-01-02|COLLECT COD|FOB|, unusual ideas. deposits cajole quietl|
+2374|74|5|4|28|27273.96|0.04|0.08|R|F|1994-02-19|1993-12-16|1994-03-15|COLLECT COD|MAIL|ets cajole fu|
+2374|1|2|5|25|22525.00|0.08|0.00|A|F|1993-11-26|1993-12-15|1993-12-10|COLLECT COD|RAIL|refully pending d|
+2375|168|9|1|3|3204.48|0.02|0.08|N|O|1997-02-14|1996-12-25|1997-02-22|COLLECT COD|RAIL|slyly across the furiously e|
+2375|132|8|2|9|9289.17|0.09|0.02|N|O|1997-02-17|1996-12-27|1997-02-27|DELIVER IN PERSON|MAIL|ly against the packages. bold pinto bean|
+2375|47|4|3|26|24623.04|0.02|0.06|N|O|1997-03-18|1997-02-02|1997-03-29|TAKE BACK RETURN|TRUCK|rate across the|
+2375|5|8|4|5|4525.00|0.01|0.00|N|O|1997-01-31|1997-01-25|1997-02-22|COLLECT COD|REG AIR|final packages cajole according to the furi|
+2375|88|9|5|42|41499.36|0.01|0.08|N|O|1997-01-24|1997-02-15|1997-02-07|DELIVER IN PERSON|FOB|apades. idea|
+2375|126|7|6|20|20522.40|0.09|0.08|N|O|1996-12-01|1996-12-26|1996-12-19|TAKE BACK RETURN|SHIP|ckages! blithely enticing deposi|
+2400|103|6|1|48|48148.80|0.01|0.02|N|O|1998-10-07|1998-08-30|1998-11-03|DELIVER IN PERSON|MAIL|fore the car|
+2400|90|1|2|1|990.09|0.04|0.07|N|O|1998-08-18|1998-09-12|1998-09-11|NONE|MAIL|silent deposits serve furious|
+2400|53|5|3|23|21920.15|0.02|0.08|N|O|1998-08-05|1998-08-28|1998-08-30|NONE|SHIP|tions. fluffily ironic platelets cajole c|
+2400|17|7|4|23|21091.23|0.09|0.04|N|O|1998-10-04|1998-10-04|1998-10-31|NONE|RAIL|ages lose carefully around the regula|
+2401|182|3|1|39|42205.02|0.00|0.03|N|O|1997-09-29|1997-10-21|1997-10-17|DELIVER IN PERSON|FOB|ould affix |
+2401|3|8|2|49|44247.00|0.05|0.07|N|O|1997-09-02|1997-09-11|1997-09-13|TAKE BACK RETURN|AIR|lites cajole carefully |
+2402|86|7|1|43|42401.44|0.03|0.08|N|O|1996-09-17|1996-11-20|1996-09-22|DELIVER IN PERSON|RAIL|slyly slyly blithe sheaves|
+2402|152|4|2|24|25251.60|0.02|0.05|N|O|1996-11-21|1996-10-19|1996-11-29|DELIVER IN PERSON|SHIP|as; blithely ironic requ|
+2403|83|4|1|34|33424.72|0.04|0.07|N|O|1998-05-30|1998-06-19|1998-06-05|NONE|REG AIR| slyly bold re|
+2403|152|4|2|19|19990.85|0.08|0.07|N|O|1998-04-20|1998-07-02|1998-05-13|DELIVER IN PERSON|FOB|sits. ironic in|
+2403|193|4|3|27|29516.13|0.05|0.03|N|O|1998-07-27|1998-07-08|1998-08-03|NONE|SHIP|deposits sleep slyly special theodolit|
+2403|31|2|4|30|27930.90|0.05|0.06|N|O|1998-08-08|1998-06-17|1998-08-20|NONE|TRUCK|ackages sleep furiously pendin|
+2404|147|10|1|36|37697.04|0.07|0.00|N|O|1997-03-27|1997-05-16|1997-04-06|COLLECT COD|REG AIR|s nag furi|
+2404|36|2|2|1|936.03|0.02|0.04|N|O|1997-05-22|1997-06-06|1997-05-28|DELIVER IN PERSON|MAIL|from the final orbits? even pinto beans hag|
+2404|18|5|3|41|37638.41|0.02|0.06|N|O|1997-06-12|1997-05-03|1997-07-12|NONE|AIR| dolphins are|
+2404|57|8|4|19|18183.95|0.09|0.03|N|O|1997-05-07|1997-05-24|1997-05-24|TAKE BACK RETURN|SHIP|cuses. quickly even in|
+2404|4|9|5|18|16272.00|0.00|0.04|N|O|1997-06-25|1997-05-06|1997-07-02|NONE|RAIL|packages. even requests according to |
+2405|89|10|1|18|17803.44|0.09|0.07|N|O|1997-01-23|1997-03-10|1997-02-03|COLLECT COD|REG AIR|carefully ironic accounts. slyly |
+2405|27|10|2|30|27810.60|0.10|0.08|N|O|1997-03-24|1997-03-10|1997-04-14|TAKE BACK RETURN|AIR|y final deposits are slyly caref|
+2405|17|8|3|49|44933.49|0.00|0.06|N|O|1996-12-24|1997-03-23|1997-01-01|TAKE BACK RETURN|FOB|cial requests. ironic, regu|
+2405|177|7|4|23|24774.91|0.08|0.05|N|O|1996-12-28|1997-01-29|1997-01-07|NONE|AIR|t wake blithely blithely regular idea|
+2406|170|5|1|18|19263.06|0.07|0.05|N|O|1997-02-17|1996-12-25|1997-02-19|COLLECT COD|MAIL|azzle furiously careful|
+2406|41|8|2|40|37641.60|0.02|0.07|N|O|1997-01-09|1996-12-02|1997-01-16|NONE|SHIP|gular accounts caj|
+2406|50|1|3|16|15200.80|0.07|0.03|N|O|1996-10-31|1996-11-28|1996-11-08|TAKE BACK RETURN|SHIP| special accou|
+2406|146|9|4|34|35568.76|0.07|0.06|N|O|1996-12-01|1996-12-07|1996-12-16|NONE|AIR|hinly even accounts are slyly q|
+2406|187|8|5|25|27179.50|0.08|0.02|N|O|1996-12-03|1996-12-14|1996-12-26|COLLECT COD|MAIL|al, regular in|
+2406|59|4|6|22|21099.10|0.05|0.02|N|O|1996-11-22|1997-01-17|1996-12-15|NONE|TRUCK|hely even foxes unwind furiously aga|
+2406|60|2|7|30|28801.80|0.07|0.07|N|O|1997-01-17|1997-01-12|1997-01-22|TAKE BACK RETURN|TRUCK| final pinto beans han|
+2407|64|3|1|14|13496.84|0.04|0.02|N|O|1998-10-10|1998-08-25|1998-10-27|NONE|FOB|l dependencies s|
+2407|166|7|2|9|9595.44|0.07|0.05|N|O|1998-08-06|1998-08-11|1998-08-20|TAKE BACK RETURN|TRUCK|ts. special deposits are closely.|
+2407|131|2|3|39|40214.07|0.02|0.02|N|O|1998-08-20|1998-09-12|1998-08-22|DELIVER IN PERSON|MAIL|iously final deposits solv|
+2407|91|4|4|10|9910.90|0.01|0.07|N|O|1998-08-14|1998-09-10|1998-08-29|COLLECT COD|FOB| pending instructions. theodolites x-|
+2407|198|1|5|14|15374.66|0.04|0.05|N|O|1998-09-24|1998-08-18|1998-10-06|DELIVER IN PERSON|FOB|tructions wake stealt|
+2407|71|9|6|18|17479.26|0.04|0.01|N|O|1998-10-03|1998-08-30|1998-10-19|TAKE BACK RETURN|MAIL| wake carefully. fluffily |
+2407|161|8|7|7|7428.12|0.07|0.03|N|O|1998-09-11|1998-08-15|1998-09-30|TAKE BACK RETURN|MAIL|totes are carefully accordin|
+2432|50|3|1|30|28501.50|0.03|0.02|N|O|1996-09-05|1996-10-10|1996-10-05|TAKE BACK RETURN|TRUCK| requests wake alongside of|
+2432|162|3|2|8|8497.28|0.07|0.01|N|O|1996-10-16|1996-10-01|1996-11-13|COLLECT COD|RAIL|s about the bold, close deposit|
+2432|109|2|3|13|13118.30|0.07|0.06|N|O|1996-09-03|1996-10-10|1996-10-03|NONE|RAIL|arefully about the caref|
+2432|13|4|4|14|12782.14|0.00|0.06|N|O|1996-08-18|1996-09-04|1996-08-27|TAKE BACK RETURN|RAIL|riously regular packages. p|
+2433|87|8|1|39|38496.12|0.01|0.04|R|F|1994-11-20|1994-09-23|1994-12-10|DELIVER IN PERSON|SHIP|ly final asy|
+2433|134|5|2|20|20682.60|0.05|0.06|A|F|1994-12-09|1994-10-20|1994-12-15|COLLECT COD|REG AIR|lithely blithely final ide|
+2433|157|2|3|38|40171.70|0.08|0.03|A|F|1994-10-15|1994-10-23|1994-11-06|DELIVER IN PERSON|SHIP|. slyly regular requests sle|
+2433|121|6|4|43|43908.16|0.01|0.05|A|F|1994-10-16|1994-10-23|1994-11-08|DELIVER IN PERSON|RAIL|ular requests. slyly even pa|
+2433|108|1|5|3|3024.30|0.06|0.02|A|F|1994-11-08|1994-09-24|1994-11-17|COLLECT COD|AIR|usly pending depos|
+2434|95|6|1|1|995.09|0.01|0.06|N|O|1997-08-02|1997-05-28|1997-08-19|TAKE BACK RETURN|MAIL| furiously express packages. ironic, pend|
+2434|127|10|2|39|40057.68|0.09|0.05|N|O|1997-06-10|1997-06-08|1997-07-03|COLLECT COD|RAIL|r deposits sleep furiou|
+2434|130|3|3|28|28843.64|0.02|0.05|N|O|1997-06-28|1997-06-26|1997-07-15|COLLECT COD|RAIL|ven theodolites around the slyly|
+2434|168|9|4|49|52339.84|0.00|0.05|N|O|1997-08-08|1997-07-23|1997-08-27|DELIVER IN PERSON|FOB| after the requests haggle bold, fina|
+2435|39|10|1|8|7512.24|0.08|0.03|A|F|1993-06-08|1993-04-04|1993-06-29|COLLECT COD|SHIP|e fluffily quickly final accounts. care|
+2435|49|2|2|43|40808.72|0.03|0.08|A|F|1993-03-27|1993-05-20|1993-04-18|DELIVER IN PERSON|TRUCK|alongside of the s|
+2435|12|9|3|24|21888.24|0.07|0.08|R|F|1993-03-14|1993-05-20|1993-03-26|DELIVER IN PERSON|SHIP|s. carefully regular d|
+2435|156|4|4|22|23235.30|0.02|0.05|R|F|1993-05-23|1993-04-14|1993-06-04|NONE|SHIP|e final, final deposits. carefully regular|
+2435|72|2|5|3|2916.21|0.07|0.07|R|F|1993-06-01|1993-03-25|1993-06-27|DELIVER IN PERSON|FOB| final accounts ar|
+2435|46|9|6|17|16082.68|0.02|0.02|A|F|1993-06-05|1993-05-05|1993-06-14|NONE|TRUCK|cajole aft|
+2435|121|10|7|8|8168.96|0.07|0.02|R|F|1993-05-03|1993-04-02|1993-05-17|COLLECT COD|SHIP|ng the fluffily special foxes nag |
+2436|155|6|1|48|50647.20|0.04|0.02|N|O|1995-10-22|1995-10-22|1995-11-16|DELIVER IN PERSON|FOB|he furiously |
+2436|117|7|2|18|18307.98|0.05|0.03|N|O|1995-10-14|1995-11-21|1995-11-12|TAKE BACK RETURN|TRUCK|y ironic accounts. furiously even packa|
+2436|164|3|3|6|6384.96|0.06|0.08|N|O|1995-10-25|1995-11-30|1995-11-24|DELIVER IN PERSON|RAIL|odolites. ep|
+2437|94|6|1|46|45728.14|0.07|0.04|A|F|1993-08-12|1993-06-16|1993-08-29|NONE|RAIL|e of the bold, dogged requests|
+2437|190|1|2|26|28344.94|0.00|0.04|A|F|1993-06-25|1993-05-22|1993-07-07|DELIVER IN PERSON|REG AIR|lyly regular accounts.|
+2437|2|7|3|23|20746.00|0.01|0.00|A|F|1993-08-15|1993-06-28|1993-08-23|TAKE BACK RETURN|SHIP|s deposits. pendi|
+2437|116|10|4|12|12193.32|0.03|0.08|A|F|1993-04-27|1993-07-01|1993-05-18|TAKE BACK RETURN|FOB|thely regular deposits. ironic fray|
+2437|17|7|5|29|26593.29|0.02|0.06|A|F|1993-05-12|1993-06-10|1993-05-25|NONE|FOB|ress dolphins. furiously fin|
+2437|19|3|6|10|9190.10|0.10|0.06|A|F|1993-05-20|1993-06-23|1993-05-22|TAKE BACK RETURN|MAIL|unts. even, ironic pl|
+2438|165|2|1|45|47932.20|0.01|0.00|A|F|1993-10-27|1993-09-24|1993-11-02|COLLECT COD|REG AIR|en theodolites w|
+2438|13|4|2|31|28303.31|0.08|0.01|R|F|1993-10-16|1993-08-31|1993-11-10|COLLECT COD|REG AIR|t. slyly ironic sh|
+2438|68|7|3|10|9680.60|0.10|0.00|R|F|1993-08-18|1993-08-28|1993-09-08|NONE|SHIP|engage car|
+2438|161|8|4|27|28651.32|0.01|0.02|R|F|1993-07-27|1993-10-01|1993-08-06|TAKE BACK RETURN|FOB|inal accounts. slyly final reques|
+2438|166|3|5|28|29852.48|0.07|0.06|R|F|1993-11-05|1993-08-22|1993-11-22|TAKE BACK RETURN|TRUCK|ctions. bli|
+2438|149|6|6|23|24130.22|0.09|0.02|R|F|1993-10-06|1993-08-17|1993-10-16|DELIVER IN PERSON|MAIL|ely; blithely special pinto beans breach|
+2438|183|4|7|46|49826.28|0.02|0.05|R|F|1993-10-27|1993-08-30|1993-11-14|COLLECT COD|SHIP| ironic requests cajole f|
+2439|164|1|1|2|2128.32|0.09|0.03|N|O|1997-04-14|1997-06-11|1997-05-09|COLLECT COD|MAIL|courts boos|
+2439|144|5|2|5|5220.70|0.07|0.01|N|O|1997-04-23|1997-04-26|1997-04-28|DELIVER IN PERSON|FOB|ites. furiously|
+2439|195|7|3|33|36141.27|0.08|0.05|N|O|1997-06-01|1997-05-15|1997-06-07|TAKE BACK RETURN|FOB|asymptotes wake packages-- furiously|
+2464|49|8|1|10|9490.40|0.05|0.03|N|O|1998-02-04|1997-12-29|1998-02-16|TAKE BACK RETURN|RAIL|slyly final pinto bean|
+2464|101|6|2|20|20022.00|0.01|0.07|N|O|1997-12-26|1998-01-02|1998-01-24|DELIVER IN PERSON|FOB|sts. slyly close ideas shall h|
+2465|68|5|1|27|26137.62|0.05|0.02|N|O|1995-09-05|1995-09-07|1995-09-17|DELIVER IN PERSON|FOB|posits boost carefully unusual instructio|
+2465|51|3|2|34|32335.70|0.02|0.05|N|O|1995-10-02|1995-08-04|1995-10-09|COLLECT COD|RAIL|posits wake. regular package|
+2465|32|3|3|8|7456.24|0.10|0.00|N|O|1995-10-16|1995-08-26|1995-11-07|TAKE BACK RETURN|FOB|s across the express deposits wak|
+2465|148|7|4|45|47166.30|0.03|0.01|N|O|1995-09-27|1995-08-25|1995-10-06|NONE|TRUCK|y silent foxes. final pinto beans above |
+2465|47|4|5|50|47352.00|0.01|0.04|N|O|1995-09-01|1995-09-06|1995-09-18|TAKE BACK RETURN|TRUCK|the pending th|
+2465|124|5|6|20|20482.40|0.03|0.03|N|O|1995-08-16|1995-08-13|1995-09-02|COLLECT COD|FOB|uriously? furiously ironic excu|
+2466|186|7|1|16|17378.88|0.00|0.02|R|F|1994-04-20|1994-04-20|1994-05-09|COLLECT COD|FOB|to beans sl|
+2466|105|8|2|10|10051.00|0.00|0.00|A|F|1994-05-08|1994-04-06|1994-06-05|DELIVER IN PERSON|AIR|sly regular deposits. regular, regula|
+2466|14|1|3|29|26506.29|0.10|0.07|A|F|1994-06-11|1994-04-27|1994-07-10|DELIVER IN PERSON|FOB|ckages. bold requests nag carefully.|
+2466|11|8|4|29|26419.29|0.04|0.04|A|F|1994-04-01|1994-04-20|1994-04-23|DELIVER IN PERSON|MAIL|es boost fluffily ab|
+2466|79|10|5|30|29372.10|0.02|0.01|A|F|1994-04-11|1994-05-02|1994-05-02|DELIVER IN PERSON|REG AIR|. fluffily even pinto beans are idly. f|
+2466|173|2|6|19|20390.23|0.10|0.07|R|F|1994-06-12|1994-04-18|1994-07-12|NONE|MAIL|ccounts cajole a|
+2466|155|7|7|35|36930.25|0.10|0.00|A|F|1994-06-01|1994-05-27|1994-06-21|COLLECT COD|AIR| packages detect carefully: ironically sl|
+2467|133|9|1|7|7231.91|0.00|0.00|N|O|1995-07-28|1995-10-04|1995-08-27|NONE|REG AIR|gular packages cajole |
+2468|94|7|1|46|45728.14|0.00|0.04|N|O|1997-07-16|1997-08-09|1997-08-07|COLLECT COD|SHIP|unusual theodolites su|
+2468|21|10|2|43|39603.86|0.00|0.04|N|O|1997-08-17|1997-08-21|1997-08-30|DELIVER IN PERSON|FOB|uriously eve|
+2468|195|6|3|44|48188.36|0.00|0.03|N|O|1997-10-01|1997-08-02|1997-10-09|TAKE BACK RETURN|RAIL|egular, silent sheave|
+2468|82|3|4|5|4910.40|0.08|0.00|N|O|1997-06-28|1997-08-02|1997-07-22|NONE|MAIL| sleep fluffily acc|
+2468|159|7|5|18|19064.70|0.07|0.00|N|O|1997-07-25|1997-08-26|1997-08-14|DELIVER IN PERSON|REG AIR|cies. fluffily r|
+2469|166|1|1|11|11727.76|0.00|0.04|N|O|1997-02-09|1997-01-26|1997-02-16|NONE|TRUCK|ies wake carefully b|
+2469|114|1|2|16|16225.76|0.07|0.06|N|O|1997-02-19|1997-02-04|1997-03-18|NONE|MAIL|ing asymptotes |
+2469|11|5|3|48|43728.48|0.05|0.06|N|O|1997-01-11|1997-01-03|1997-01-15|TAKE BACK RETURN|AIR|riously even theodolites u|
+2469|88|9|4|35|34582.80|0.06|0.06|N|O|1997-02-04|1997-02-02|1997-02-17|DELIVER IN PERSON|RAIL|ld packages haggle regular frets. fluffily |
+2469|121|4|5|30|30633.60|0.09|0.01|N|O|1996-12-21|1997-01-29|1997-01-02|COLLECT COD|SHIP| accounts. regular theodolites affix fu|
+2469|104|5|6|49|49200.90|0.02|0.02|N|O|1997-03-03|1996-12-26|1997-03-13|NONE|AIR| requests are car|
+2469|127|10|7|8|8216.96|0.02|0.00|N|O|1997-03-15|1997-01-20|1997-04-13|NONE|TRUCK|s. regular|
+2470|110|5|1|12|12121.32|0.06|0.06|N|O|1997-07-12|1997-05-24|1997-07-17|TAKE BACK RETURN|FOB|l accounts. deposits nag daringly. express,|
+2470|100|4|2|50|50005.00|0.03|0.03|N|O|1997-06-02|1997-06-01|1997-06-09|COLLECT COD|AIR| packages |
+2470|64|3|3|10|9640.60|0.05|0.08|N|O|1997-06-20|1997-06-19|1997-06-24|TAKE BACK RETURN|FOB| ironic requests a|
+2470|162|3|4|30|31864.80|0.04|0.08|N|O|1997-08-04|1997-07-13|1997-08-14|DELIVER IN PERSON|AIR|s across the furiously fina|
+2471|84|5|1|37|36410.96|0.05|0.01|N|O|1998-05-28|1998-04-17|1998-06-08|COLLECT COD|TRUCK|ounts mold blithely carefully express depo|
+2496|141|8|1|38|39563.32|0.02|0.07|R|F|1994-03-26|1994-04-06|1994-04-23|COLLECT COD|RAIL| bold accounts. furi|
+2496|23|4|2|39|35997.78|0.03|0.00|R|F|1994-03-23|1994-02-18|1994-04-10|TAKE BACK RETURN|FOB|arefully special dependencies abo|
+2496|189|10|3|36|39210.48|0.09|0.04|R|F|1994-03-27|1994-03-15|1994-04-17|TAKE BACK RETURN|SHIP|ully ironic f|
+2496|24|9|4|30|27720.60|0.04|0.01|A|F|1994-01-27|1994-03-11|1994-01-31|DELIVER IN PERSON|RAIL|ake. ironic foxes cajole quickly. fu|
+2497|12|2|1|34|31008.34|0.02|0.03|R|F|1992-09-02|1992-10-19|1992-09-12|COLLECT COD|AIR|ronic accounts. p|
+2497|77|7|2|15|14656.05|0.09|0.02|A|F|1992-12-23|1992-11-20|1993-01-18|DELIVER IN PERSON|SHIP|sly against the|
+2497|34|5|3|28|26152.84|0.02|0.08|A|F|1992-12-02|1992-11-21|1992-12-04|DELIVER IN PERSON|REG AIR|ouches. special, regular requests|
+2497|144|5|4|48|50118.72|0.06|0.05|A|F|1992-09-29|1992-11-13|1992-10-19|TAKE BACK RETURN|AIR| even, regular requests across |
+2497|175|5|5|28|30104.76|0.04|0.05|A|F|1992-11-10|1992-09-30|1992-11-18|DELIVER IN PERSON|MAIL|hely bold ideas. unusual instructions ac|
+2497|71|2|6|19|18450.33|0.05|0.08|A|F|1992-11-10|1992-11-20|1992-12-05|TAKE BACK RETURN|TRUCK| instructions? carefully daring accounts|
+2498|143|2|1|48|50070.72|0.10|0.01|R|F|1993-11-25|1994-01-09|1993-12-24|DELIVER IN PERSON|RAIL|onic requests wake|
+2499|150|3|1|15|15752.25|0.04|0.06|N|O|1995-12-21|1995-12-06|1996-01-19|DELIVER IN PERSON|FOB| slyly across the slyly|
+2499|46|3|2|48|45409.92|0.09|0.03|N|O|1995-10-14|1995-12-12|1995-11-11|DELIVER IN PERSON|AIR|ronic ideas cajole quickly requests. caref|
+2499|133|9|3|31|32027.03|0.09|0.05|N|O|1995-12-09|1995-10-28|1996-01-05|COLLECT COD|AIR|to beans across the carefully ironic theodo|
+2499|159|7|4|39|41306.85|0.06|0.02|N|O|1995-10-26|1995-10-27|1995-11-07|TAKE BACK RETURN|SHIP|otes sublat|
+2499|130|9|5|6|6180.78|0.02|0.01|N|O|1995-11-19|1995-12-14|1995-12-08|NONE|SHIP|cording to the|
+2499|119|3|6|12|12229.32|0.04|0.05|N|O|1995-11-18|1995-12-13|1995-11-23|COLLECT COD|REG AIR|le furiously along the r|
+2500|192|3|1|40|43687.60|0.00|0.02|A|F|1992-09-02|1992-09-30|1992-09-06|DELIVER IN PERSON|SHIP|efully unusual dolphins s|
+2500|37|8|2|34|31859.02|0.06|0.02|R|F|1992-10-03|1992-11-11|1992-10-29|DELIVER IN PERSON|TRUCK| stealthy a|
+2500|80|10|3|41|40183.28|0.02|0.00|R|F|1992-09-02|1992-11-11|1992-09-06|DELIVER IN PERSON|RAIL|s could have to integrate after the |
+2500|69|8|4|17|16474.02|0.01|0.02|A|F|1992-09-30|1992-10-16|1992-10-05|DELIVER IN PERSON|REG AIR|encies-- ironic, even packages|
+2501|84|5|1|4|3936.32|0.10|0.06|N|O|1997-07-17|1997-07-27|1997-07-22|COLLECT COD|RAIL|quests. furiously final|
+2501|106|1|2|33|33201.30|0.01|0.04|N|O|1997-07-14|1997-08-09|1997-07-26|NONE|MAIL|leep furiously packages. even sauternes |
+2501|72|2|3|20|19441.40|0.10|0.06|N|O|1997-09-23|1997-07-01|1997-10-03|DELIVER IN PERSON|RAIL|equests. furiou|
+2501|58|10|4|26|24909.30|0.09|0.01|N|O|1997-07-15|1997-08-15|1997-07-28|DELIVER IN PERSON|SHIP|c accounts. express, iron|
+2502|163|4|1|33|35084.28|0.10|0.06|R|F|1993-08-12|1993-07-22|1993-09-04|COLLECT COD|REG AIR|have to print|
+2503|123|2|1|33|33762.96|0.06|0.01|R|F|1993-07-06|1993-08-14|1993-08-02|NONE|SHIP|nal courts integrate according to the|
+2503|65|10|2|28|27021.68|0.06|0.01|R|F|1993-08-08|1993-08-31|1993-08-10|NONE|SHIP|s wake quickly slyly |
+2503|46|7|3|50|47302.00|0.09|0.01|A|F|1993-09-22|1993-08-17|1993-09-29|DELIVER IN PERSON|TRUCK|s around the slyly |
+2503|91|5|4|27|26759.43|0.09|0.00|A|F|1993-07-12|1993-07-24|1993-07-22|DELIVER IN PERSON|TRUCK|lly even p|
+2503|48|5|5|3|2844.12|0.04|0.02|A|F|1993-07-10|1993-09-17|1993-07-19|TAKE BACK RETURN|TRUCK|s cajole. slyly close courts nod f|
+2503|128|7|6|39|40096.68|0.05|0.05|R|F|1993-10-11|1993-09-09|1993-10-16|NONE|MAIL|d carefully fluffily|
+2503|19|6|7|17|15623.17|0.09|0.08|R|F|1993-09-04|1993-07-31|1993-09-23|DELIVER IN PERSON|SHIP|c accounts haggle blithel|
+2528|1|2|1|10|9010.00|0.02|0.03|R|F|1994-12-12|1994-12-29|1994-12-28|COLLECT COD|REG AIR|ely. fluffily even re|
+2528|74|3|2|13|12662.91|0.00|0.03|A|F|1994-11-27|1995-01-20|1994-12-03|TAKE BACK RETURN|REG AIR|ggle furiously. slyly final asympt|
+2528|175|6|3|35|37630.95|0.10|0.00|R|F|1994-12-19|1995-02-04|1995-01-15|NONE|MAIL|, even excuses. even,|
+2528|65|4|4|37|35707.22|0.00|0.01|A|F|1994-12-25|1995-02-02|1994-12-31|COLLECT COD|AIR|ng the pending excuses haggle after the bl|
+2529|131|7|1|4|4124.52|0.07|0.07|N|O|1996-10-19|1996-11-18|1996-10-24|DELIVER IN PERSON|SHIP|al dependencies haggle slyly alongsi|
+2530|21|2|1|9|8289.18|0.09|0.03|R|F|1994-05-10|1994-04-30|1994-05-24|TAKE BACK RETURN|REG AIR|lyly ironic|
+2530|93|7|2|42|41709.78|0.04|0.08|R|F|1994-03-27|1994-05-20|1994-03-29|NONE|RAIL|ng platelets wake s|
+2530|108|1|3|8|8064.80|0.10|0.08|A|F|1994-05-02|1994-05-08|1994-05-24|DELIVER IN PERSON|MAIL|ial asymptotes snooze slyly regular |
+2531|148|7|1|9|9433.26|0.03|0.07|N|O|1996-07-27|1996-07-03|1996-08-01|DELIVER IN PERSON|AIR|t the dogged, un|
+2531|157|2|2|3|3171.45|0.07|0.06|N|O|1996-07-20|1996-06-20|1996-08-10|NONE|MAIL|he quickly ev|
+2531|86|7|3|20|19721.60|0.06|0.04|N|O|1996-07-18|1996-06-25|1996-07-29|TAKE BACK RETURN|TRUCK|into beans. furious|
+2531|191|5|4|36|39282.84|0.08|0.01|N|O|1996-06-11|1996-07-26|1996-06-27|NONE|MAIL|y ironic, bold packages. blithely e|
+2531|56|4|5|28|26769.40|0.03|0.07|N|O|1996-07-06|1996-07-31|1996-07-19|TAKE BACK RETURN|REG AIR|its. busily|
+2531|145|4|6|46|48076.44|0.10|0.08|N|O|1996-07-03|1996-06-27|1996-07-12|TAKE BACK RETURN|REG AIR|e final, bold pains. ir|
+2532|53|4|1|3|2859.15|0.06|0.07|N|O|1995-12-14|1995-11-28|1995-12-15|COLLECT COD|FOB|unusual sentiments. even pinto|
+2532|160|2|2|33|34985.28|0.06|0.05|N|O|1995-11-23|1996-01-04|1995-12-16|DELIVER IN PERSON|TRUCK|rve carefully slyly ironic accounts! fluf|
+2532|135|1|3|1|1035.13|0.00|0.06|N|O|1996-01-27|1995-11-23|1996-01-29|DELIVER IN PERSON|REG AIR|ely final ideas cajole despite the ca|
+2532|78|8|4|50|48903.50|0.02|0.02|N|O|1995-11-13|1996-01-01|1995-11-26|NONE|TRUCK|yly after the fluffily regul|
+2532|114|1|5|9|9126.99|0.09|0.04|N|O|1995-11-30|1995-11-23|1995-12-12|DELIVER IN PERSON|TRUCK|cial ideas haggle slyly pending request|
+2532|150|1|6|20|21003.00|0.09|0.05|N|O|1995-12-02|1995-11-26|1995-12-08|TAKE BACK RETURN|AIR|er the slyly pending|
+2533|54|9|1|36|34345.80|0.06|0.04|N|O|1997-06-10|1997-04-28|1997-07-01|NONE|REG AIR|ss requests sleep neve|
+2533|198|10|2|5|5490.95|0.10|0.04|N|O|1997-05-26|1997-06-02|1997-06-24|NONE|FOB|ccounts. ironic, special accounts boo|
+2533|183|4|3|37|40077.66|0.00|0.08|N|O|1997-05-10|1997-04-26|1997-05-28|COLLECT COD|SHIP| haggle carefully |
+2533|30|5|4|17|15810.51|0.06|0.02|N|O|1997-05-23|1997-05-10|1997-06-18|NONE|FOB|ackages. blith|
+2533|126|1|5|38|38992.56|0.09|0.00|N|O|1997-05-10|1997-06-02|1997-05-28|TAKE BACK RETURN|REG AIR|of the regular accounts. even packages caj|
+2533|184|5|6|20|21683.60|0.05|0.08|N|O|1997-07-04|1997-04-30|1997-07-05|COLLECT COD|FOB|thless excuses are b|
+2533|94|7|7|14|13917.26|0.06|0.04|N|O|1997-07-06|1997-05-08|1997-08-03|COLLECT COD|FOB|ut the pending, special depos|
+2534|139|5|1|29|30134.77|0.07|0.07|N|O|1996-08-09|1996-09-29|1996-08-11|COLLECT COD|TRUCK|ugouts haggle slyly. final|
+2534|27|6|2|49|45423.98|0.08|0.08|N|O|1996-09-01|1996-08-20|1996-09-06|NONE|SHIP|sometimes regular requests. blithely unus|
+2534|1|4|3|50|45050.00|0.10|0.06|N|O|1996-09-25|1996-10-07|1996-10-09|TAKE BACK RETURN|AIR|ideas. deposits use. slyly regular pa|
+2534|75|3|4|43|41928.01|0.09|0.02|N|O|1996-10-25|1996-09-30|1996-11-05|TAKE BACK RETURN|REG AIR|ngly final depos|
+2534|165|2|5|14|14912.24|0.05|0.02|N|O|1996-08-12|1996-09-26|1996-08-28|COLLECT COD|MAIL|eposits doze quickly final|
+2534|116|10|6|12|12193.32|0.02|0.02|N|O|1996-07-29|1996-10-12|1996-08-14|TAKE BACK RETURN|AIR|sual depos|
+2534|173|3|7|17|18243.89|0.02|0.07|N|O|1996-07-22|1996-09-15|1996-08-03|NONE|SHIP|riously regular |
+2535|199|2|1|5|5495.95|0.06|0.01|A|F|1993-09-07|1993-07-25|1993-09-29|DELIVER IN PERSON|REG AIR|, unusual reque|
+2535|39|5|2|12|11268.36|0.08|0.05|A|F|1993-07-17|1993-08-17|1993-07-31|TAKE BACK RETURN|FOB|uses sleep among the packages. excuses |
+2535|54|5|3|5|4770.25|0.09|0.06|R|F|1993-07-28|1993-08-14|1993-08-11|DELIVER IN PERSON|SHIP| across the express requests. silent, eve|
+2535|160|5|4|19|20143.04|0.01|0.02|A|F|1993-06-01|1993-08-01|1993-06-19|DELIVER IN PERSON|FOB|ructions. final requests|
+2535|174|3|5|25|26854.25|0.07|0.04|A|F|1993-07-19|1993-08-07|1993-07-27|NONE|REG AIR|ions believe ab|
+2560|169|10|1|41|43835.56|0.07|0.01|R|F|1992-10-23|1992-11-11|1992-11-22|NONE|SHIP| after the accounts. regular foxes are be|
+2560|4|9|2|27|24408.00|0.00|0.01|R|F|1992-12-03|1992-11-16|1992-12-30|NONE|MAIL| against the carefully|
+2560|46|5|3|31|29327.24|0.01|0.05|A|F|1992-11-14|1992-10-14|1992-12-11|DELIVER IN PERSON|AIR|to beans. blithely regular Tiresias int|
+2560|72|1|4|36|34994.52|0.01|0.02|A|F|1992-10-18|1992-10-30|1992-11-05|TAKE BACK RETURN|MAIL|accounts alongside of the excuses are |
+2560|42|1|5|9|8478.36|0.04|0.02|A|F|1992-10-23|1992-10-29|1992-11-02|COLLECT COD|REG AIR| deposits affix quickly. unusual, eve|
+2560|108|9|6|13|13105.30|0.03|0.06|A|F|1992-09-07|1992-10-21|1992-09-24|COLLECT COD|FOB|slyly final accoun|
+2561|25|4|1|32|29600.64|0.02|0.01|N|O|1998-01-05|1997-12-28|1998-01-26|DELIVER IN PERSON|REG AIR|bold packages wake slyly. slyly|
+2561|98|1|2|5|4990.45|0.07|0.04|N|O|1997-12-27|1998-01-23|1998-01-13|TAKE BACK RETURN|AIR|p ironic, regular pinto beans.|
+2561|173|4|3|47|50438.99|0.04|0.02|N|O|1997-11-19|1998-01-21|1997-12-03|DELIVER IN PERSON|REG AIR|larly pending t|
+2561|108|9|4|39|39315.90|0.08|0.06|N|O|1998-01-20|1997-12-16|1998-02-05|TAKE BACK RETURN|MAIL|equests are furiously against the|
+2561|150|3|5|2|2100.30|0.04|0.08|N|O|1998-03-14|1998-01-21|1998-03-27|DELIVER IN PERSON|TRUCK|s are. silently silent foxes sleep about|
+2561|51|6|6|14|13314.70|0.02|0.03|N|O|1998-03-07|1998-02-04|1998-03-21|COLLECT COD|RAIL|ep unusual, ironic accounts|
+2562|53|5|1|28|26685.40|0.04|0.03|R|F|1992-10-04|1992-09-24|1992-10-09|COLLECT COD|MAIL|ans haggle special, special packages. |
+2562|148|9|2|1|1048.14|0.01|0.06|R|F|1992-10-16|1992-09-18|1992-10-17|NONE|TRUCK| slyly final ideas haggle car|
+2562|66|7|3|25|24151.50|0.05|0.03|A|F|1992-11-23|1992-10-08|1992-12-19|DELIVER IN PERSON|REG AIR| accounts-- silent, unusual ideas a|
+2562|148|1|4|37|38781.18|0.08|0.03|R|F|1992-10-29|1992-10-06|1992-11-09|COLLECT COD|FOB|. slyly regular ideas according to the fl|
+2562|160|8|5|29|30744.64|0.05|0.08|A|F|1992-11-01|1992-09-29|1992-11-13|TAKE BACK RETURN|MAIL|eep against the furiously r|
+2562|50|7|6|17|16150.85|0.01|0.06|A|F|1992-10-15|1992-10-08|1992-10-26|DELIVER IN PERSON|TRUCK|lar pinto beans. blithely ev|
+2563|65|4|1|10|9650.60|0.07|0.04|A|F|1994-01-26|1993-12-19|1994-01-28|DELIVER IN PERSON|AIR|tealthily abo|
+2563|167|4|2|28|29880.48|0.04|0.03|R|F|1994-03-17|1994-02-04|1994-04-13|TAKE BACK RETURN|RAIL|hely regular depe|
+2563|119|9|3|39|39745.29|0.07|0.00|R|F|1994-02-10|1993-12-31|1994-02-19|COLLECT COD|FOB|lent requests should integrate; carefully e|
+2563|90|1|4|50|49504.50|0.01|0.01|A|F|1994-01-26|1994-01-03|1994-02-09|DELIVER IN PERSON|SHIP|ly regular, regular excuses. bold plate|
+2563|15|6|5|42|38430.42|0.06|0.08|R|F|1994-02-21|1994-02-14|1994-03-04|DELIVER IN PERSON|AIR|ymptotes nag furiously slyly even inst|
+2563|121|2|6|5|5105.60|0.10|0.00|R|F|1993-12-27|1993-12-19|1994-01-02|DELIVER IN PERSON|REG AIR| the quickly final theodolite|
+2564|112|3|1|4|4048.44|0.02|0.00|R|F|1994-11-12|1994-10-29|1994-12-04|NONE|MAIL|y express requests sleep furi|
+2565|144|5|1|42|43853.88|0.04|0.08|N|O|1998-04-07|1998-04-02|1998-05-04|NONE|AIR|ngly silent |
+2565|189|10|2|26|28318.68|0.05|0.08|N|O|1998-05-07|1998-04-09|1998-05-15|DELIVER IN PERSON|TRUCK| pinto beans about the slyly regula|
+2565|115|5|3|34|34513.74|0.06|0.06|N|O|1998-03-19|1998-04-12|1998-04-17|DELIVER IN PERSON|SHIP|nstructions was carefu|
+2565|17|7|4|25|22925.25|0.10|0.08|N|O|1998-06-27|1998-05-20|1998-07-13|DELIVER IN PERSON|RAIL|, express accounts. final id|
+2565|76|7|5|26|25377.82|0.08|0.03|N|O|1998-03-05|1998-04-11|1998-03-11|TAKE BACK RETURN|AIR|ites wake. ironic acco|
+2565|141|4|6|48|49974.72|0.08|0.07|N|O|1998-06-18|1998-05-06|1998-07-13|DELIVER IN PERSON|TRUCK|r instructions sleep qui|
+2566|148|5|1|19|19914.66|0.06|0.07|R|F|1992-12-21|1992-11-24|1992-12-22|DELIVER IN PERSON|MAIL|ests. silent|
+2566|181|2|2|42|45409.56|0.08|0.02|R|F|1992-12-20|1992-12-22|1992-12-29|COLLECT COD|MAIL|ously ironic accounts|
+2566|23|8|3|18|16614.36|0.09|0.02|A|F|1992-11-16|1992-12-24|1992-12-16|COLLECT COD|FOB| braids according t|
+2566|42|9|4|3|2826.12|0.05|0.02|A|F|1992-11-04|1992-12-30|1992-12-04|TAKE BACK RETURN|FOB|ckages are ironic Tiresias. furious|
+2566|22|3|5|9|8298.18|0.04|0.03|R|F|1992-12-14|1992-12-28|1992-12-16|NONE|FOB|blithely bold accounts? quickl|
+2566|128|3|6|1|1028.12|0.07|0.03|A|F|1992-10-28|1992-11-20|1992-11-22|TAKE BACK RETURN|AIR|theodolites wake pending|
+2567|26|9|1|39|36114.78|0.03|0.04|N|O|1998-05-10|1998-05-10|1998-05-21|NONE|SHIP|ns. furiously final dependencies cajo|
+2567|112|3|2|50|50605.50|0.06|0.05|N|O|1998-05-05|1998-04-18|1998-05-09|DELIVER IN PERSON|TRUCK|. carefully pending foxes are furi|
+2567|52|10|3|6|5712.30|0.03|0.06|N|O|1998-04-21|1998-04-14|1998-05-11|NONE|RAIL|s cajole regular, final acco|
+2567|158|6|4|50|52907.50|0.05|0.03|N|O|1998-03-27|1998-05-25|1998-04-23|DELIVER IN PERSON|FOB|pinto beans? r|
+2567|81|2|5|46|45129.68|0.07|0.02|N|O|1998-06-02|1998-04-30|1998-06-13|COLLECT COD|AIR|efully pending epitaphs. carefully reg|
+2567|100|3|6|32|32003.20|0.01|0.07|N|O|1998-05-24|1998-04-30|1998-06-14|NONE|RAIL| the even, iro|
+2567|135|6|7|43|44510.59|0.06|0.02|N|O|1998-05-11|1998-04-15|1998-05-29|NONE|RAIL|requests. final courts cajole |
+2592|90|1|1|7|6930.63|0.10|0.04|R|F|1993-03-13|1993-04-25|1993-04-01|NONE|REG AIR| carefully special theodolites integrate |
+2592|66|1|2|2|1932.12|0.10|0.00|A|F|1993-03-24|1993-04-05|1993-04-16|DELIVER IN PERSON|RAIL|side of the b|
+2593|105|2|1|37|37188.70|0.08|0.06|R|F|1993-12-14|1993-10-08|1994-01-04|NONE|SHIP|s wake bravel|
+2593|90|1|2|28|27722.52|0.08|0.03|A|F|1993-10-30|1993-10-18|1993-11-06|DELIVER IN PERSON|SHIP|y even escapades shall|
+2593|128|3|3|6|6168.72|0.04|0.05|A|F|1993-11-28|1993-10-04|1993-12-28|TAKE BACK RETURN|REG AIR|ular packages. re|
+2593|161|10|4|44|46691.04|0.02|0.08|A|F|1993-09-05|1993-10-23|1993-09-29|NONE|RAIL|ents impress furiously; unusual theodoli|
+2593|4|5|5|3|2712.00|0.03|0.00|A|F|1993-12-16|1993-11-01|1993-12-29|COLLECT COD|SHIP|the furiously |
+2593|175|6|6|1|1075.17|0.08|0.08|A|F|1993-11-23|1993-10-25|1993-12-04|DELIVER IN PERSON|RAIL| accounts wake slyly |
+2593|192|5|7|11|12014.09|0.00|0.07|R|F|1993-11-01|1993-11-19|1993-11-28|TAKE BACK RETURN|RAIL|express packages sleep bold re|
+2594|72|3|1|7|6804.49|0.06|0.02|R|F|1993-03-26|1993-03-05|1993-04-24|DELIVER IN PERSON|FOB|arls cajole |
+2594|124|9|2|13|13313.56|0.10|0.05|R|F|1993-02-06|1993-03-01|1993-02-23|TAKE BACK RETURN|TRUCK|fully special accounts use courts|
+2594|126|1|3|24|24626.88|0.03|0.00|A|F|1993-01-31|1993-03-10|1993-02-04|COLLECT COD|REG AIR|lar accounts sleep fur|
+2594|144|7|4|46|48030.44|0.00|0.08|R|F|1993-04-17|1993-03-06|1993-04-21|TAKE BACK RETURN|SHIP|beans. instructions across t|
+2595|61|2|1|42|40364.52|0.08|0.02|N|O|1996-03-24|1996-01-28|1996-04-10|DELIVER IN PERSON|MAIL|ggle furiou|
+2595|88|9|2|30|29642.40|0.05|0.01|N|O|1996-03-05|1996-02-23|1996-03-19|NONE|AIR|ctions. regula|
+2595|24|3|3|19|17556.38|0.01|0.05|N|O|1995-12-23|1996-03-02|1996-01-17|COLLECT COD|MAIL|ns are neve|
+2595|159|1|4|29|30715.35|0.07|0.05|N|O|1996-01-01|1996-02-13|1996-01-18|TAKE BACK RETURN|RAIL|ronic accounts haggle carefully fin|
+2595|86|7|5|30|29582.40|0.09|0.07|N|O|1996-03-16|1996-01-31|1996-04-05|TAKE BACK RETURN|FOB|. final orbits cajole |
+2595|82|3|6|31|30444.48|0.06|0.04|N|O|1996-02-07|1996-02-10|1996-03-05|DELIVER IN PERSON|AIR|tipliers w|
+2596|170|5|1|6|6421.02|0.05|0.01|N|O|1996-12-15|1996-11-02|1996-12-29|TAKE BACK RETURN|TRUCK|ily special re|
+2596|139|10|2|43|44682.59|0.07|0.03|N|O|1996-09-03|1996-10-26|1996-09-15|NONE|FOB|ial packages haggl|
+2596|39|5|3|19|17841.57|0.10|0.00|N|O|1996-09-02|1996-11-03|1996-09-06|COLLECT COD|AIR|ias mold! sp|
+2596|105|6|4|10|10051.00|0.06|0.05|N|O|1996-08-25|1996-11-05|1996-09-13|DELIVER IN PERSON|REG AIR| instructions shall have|
+2597|84|5|1|24|23617.92|0.07|0.00|A|F|1993-05-15|1993-03-06|1993-05-25|TAKE BACK RETURN|FOB|pending packages. enticingly fi|
+2598|7|4|1|12|10884.00|0.00|0.01|N|O|1996-06-17|1996-04-12|1996-06-24|COLLECT COD|TRUCK|express packages nag sly|
+2598|148|7|2|40|41925.60|0.07|0.02|N|O|1996-05-11|1996-05-19|1996-06-08|TAKE BACK RETURN|AIR|the enticing|
+2598|104|9|3|4|4016.40|0.03|0.03|N|O|1996-05-23|1996-05-13|1996-05-25|COLLECT COD|AIR| across the furiously fi|
+2598|23|2|4|19|17537.38|0.02|0.00|N|O|1996-04-09|1996-05-30|1996-04-17|TAKE BACK RETURN|RAIL|nic packages. even accounts|
+2598|106|3|5|12|12073.20|0.01|0.08|N|O|1996-04-14|1996-04-24|1996-04-21|TAKE BACK RETURN|REG AIR|eposits cajol|
+2599|101|4|1|11|11012.10|0.08|0.08|N|O|1997-02-01|1996-12-14|1997-02-27|TAKE BACK RETURN|FOB| express accoun|
+2599|42|5|2|26|24493.04|0.03|0.04|N|O|1996-11-08|1996-12-21|1996-11-24|TAKE BACK RETURN|AIR|nag carefully |
+2599|99|10|3|29|28973.61|0.09|0.03|N|O|1997-01-10|1996-12-10|1997-02-02|COLLECT COD|RAIL|ly express dolphins. special, |
+2624|63|10|1|15|14445.90|0.03|0.07|N|O|1997-02-28|1997-02-19|1997-03-21|DELIVER IN PERSON|AIR|le. quickly pending requests|
+2624|189|10|2|12|13070.16|0.07|0.00|N|O|1997-02-24|1997-02-22|1997-02-27|DELIVER IN PERSON|SHIP|er the quickly unu|
+2625|20|1|1|42|38640.84|0.02|0.04|R|F|1992-10-18|1992-11-17|1992-10-23|DELIVER IN PERSON|AIR| even accounts haggle furiously|
+2626|22|5|1|45|41490.90|0.09|0.04|N|O|1995-11-22|1995-11-01|1995-11-23|NONE|AIR|deposits wake blithely according to |
+2626|175|3|2|2|2150.34|0.05|0.07|N|O|1995-10-19|1995-11-09|1995-10-24|TAKE BACK RETURN|FOB|uffy accounts haggle furiously above|
+2626|154|2|3|40|42166.00|0.05|0.07|N|O|1995-09-28|1995-12-03|1995-10-10|NONE|REG AIR|eans. ironic deposits haggle. depo|
+2627|131|7|1|28|28871.64|0.09|0.02|R|F|1992-05-14|1992-05-09|1992-05-31|COLLECT COD|SHIP|ggedly final excuses nag packages. f|
+2628|106|9|1|44|44268.40|0.07|0.03|R|F|1994-01-11|1994-01-14|1994-01-13|DELIVER IN PERSON|SHIP|lyly final, pending ide|
+2628|106|9|2|14|14085.40|0.01|0.03|A|F|1994-01-28|1993-11-30|1994-02-20|TAKE BACK RETURN|SHIP|g the furiously unusual pi|
+2628|64|9|3|42|40490.52|0.00|0.00|A|F|1993-11-20|1994-01-04|1993-12-19|DELIVER IN PERSON|TRUCK|ld notornis alongside |
+2628|95|7|4|23|22887.07|0.08|0.04|A|F|1993-10-27|1994-01-08|1993-11-12|DELIVER IN PERSON|TRUCK|usual packages sleep about the fina|
+2628|90|1|5|50|49504.50|0.07|0.01|A|F|1994-01-13|1993-12-11|1994-01-14|NONE|AIR|posits serve carefully toward |
+2629|118|9|1|6|6108.66|0.06|0.05|N|O|1998-06-10|1998-05-29|1998-06-13|DELIVER IN PERSON|SHIP|dolites hinder bli|
+2629|124|7|2|31|31747.72|0.08|0.03|N|O|1998-05-24|1998-05-26|1998-06-10|COLLECT COD|AIR|ate blithely bold, regular deposits. bold|
+2629|128|9|3|29|29815.48|0.08|0.07|N|O|1998-07-09|1998-06-17|1998-07-12|TAKE BACK RETURN|AIR|eposits serve unusual, express i|
+2629|70|5|4|33|32012.31|0.06|0.03|N|O|1998-05-29|1998-05-14|1998-05-30|NONE|TRUCK|es. slowly express accounts are along the|
+2630|29|8|1|46|42734.92|0.05|0.03|R|F|1992-11-05|1992-12-17|1992-12-05|TAKE BACK RETURN|MAIL|uests cajole. e|
+2630|57|2|2|8|7656.40|0.09|0.07|A|F|1992-11-16|1993-01-01|1992-12-07|DELIVER IN PERSON|TRUCK|indle fluffily silent, ironic pi|
+2630|173|2|3|45|48292.65|0.08|0.07|A|F|1993-01-04|1993-01-11|1993-01-09|NONE|FOB|edly express ideas. carefully final |
+2630|162|9|4|29|30802.64|0.08|0.07|A|F|1992-12-03|1993-01-04|1992-12-12|DELIVER IN PERSON|SHIP|efully unusual dependencies. even i|
+2631|122|7|1|42|42929.04|0.00|0.03|A|F|1994-01-04|1993-12-01|1994-01-16|TAKE BACK RETURN|SHIP|ect carefully at the furiously final the|
+2631|67|4|2|4|3868.24|0.07|0.06|R|F|1993-11-03|1993-12-17|1993-11-05|COLLECT COD|AIR|special theodolites. a|
+2631|118|8|3|15|15271.65|0.06|0.05|A|F|1993-09-30|1993-11-06|1993-10-13|DELIVER IN PERSON|SHIP|y. furiously even pinto be|
+2656|181|2|1|10|10811.80|0.02|0.06|R|F|1993-06-28|1993-07-04|1993-07-12|TAKE BACK RETURN|TRUCK|s nag regularly about the deposits. slyly|
+2656|137|8|2|38|39410.94|0.07|0.02|A|F|1993-06-25|1993-06-04|1993-07-24|NONE|RAIL|structions wake along the furio|
+2656|2|5|3|19|17138.00|0.03|0.02|R|F|1993-08-03|1993-07-25|1993-08-20|TAKE BACK RETURN|MAIL|ts serve deposi|
+2656|110|3|4|40|40404.40|0.05|0.04|R|F|1993-06-09|1993-07-24|1993-06-21|DELIVER IN PERSON|RAIL|refully final pearls. final ideas wake. qu|
+2657|115|9|1|22|22332.42|0.02|0.03|N|O|1995-12-08|1995-12-28|1995-12-21|TAKE BACK RETURN|MAIL|r ideas. furiously special dolphins|
+2657|165|2|2|15|15977.40|0.08|0.05|N|O|1995-12-09|1995-12-16|1995-12-18|NONE|RAIL|ole carefully above the ironic ideas. b|
+2657|79|9|3|25|24476.75|0.02|0.04|N|O|1995-10-21|1995-12-12|1995-11-09|COLLECT COD|FOB|lly pinto beans. final |
+2657|55|7|4|11|10505.55|0.04|0.08|N|O|1995-11-19|1995-12-11|1995-11-24|COLLECT COD|TRUCK|ckly enticing requests. fur|
+2657|78|9|5|42|41078.94|0.06|0.03|N|O|1996-01-23|1995-11-22|1996-01-25|COLLECT COD|RAIL|ckly slyly even accounts. platelets x-ray|
+2657|194|7|6|31|33919.89|0.01|0.03|N|O|1995-11-10|1995-11-27|1995-12-06|COLLECT COD|RAIL|re blithely |
+2658|132|3|1|41|42317.33|0.05|0.04|N|O|1995-11-07|1995-11-04|1995-12-04|NONE|MAIL|eposits. furiously final theodolite|
+2658|29|4|2|22|20438.44|0.08|0.05|N|O|1995-11-12|1995-11-18|1995-11-14|DELIVER IN PERSON|TRUCK|ts cajole. pending packages affix|
+2658|18|5|3|13|11934.13|0.07|0.06|N|O|1995-10-24|1995-12-12|1995-11-14|COLLECT COD|FOB|s kindle blithely regular accounts.|
+2658|92|5|4|22|21825.98|0.04|0.04|N|O|1995-12-02|1995-11-03|1995-12-26|DELIVER IN PERSON|SHIP| dependencies. blithely pending foxes abou|
+2658|7|8|5|45|40815.00|0.03|0.01|N|O|1995-11-02|1995-11-08|1995-11-29|DELIVER IN PERSON|MAIL|e special requests. quickly ex|
+2658|147|4|6|27|28272.78|0.05|0.07|N|O|1995-09-26|1995-12-08|1995-09-30|NONE|AIR|ecial packages use abov|
+2659|42|1|1|28|26377.12|0.08|0.05|A|F|1994-03-17|1994-01-24|1994-03-19|NONE|FOB|idle tithes|
+2659|43|2|2|21|19803.84|0.00|0.00|A|F|1993-12-23|1994-02-10|1994-01-17|DELIVER IN PERSON|RAIL|y beyond the furiously even co|
+2659|135|1|3|24|24843.12|0.04|0.03|R|F|1994-03-28|1994-02-20|1994-04-05|DELIVER IN PERSON|REG AIR| haggle carefully |
+2659|119|6|4|2|2038.22|0.00|0.08|R|F|1994-02-19|1994-03-12|1994-02-21|NONE|MAIL|sts above the fluffily express fo|
+2659|7|4|5|9|8163.00|0.08|0.03|A|F|1994-02-07|1994-03-17|1994-03-04|DELIVER IN PERSON|AIR|ly final packages sleep ac|
+2660|48|7|1|17|16116.68|0.00|0.05|N|O|1995-08-18|1995-09-13|1995-09-17|NONE|SHIP|al pinto beans wake after the furious|
+2661|178|9|1|31|33423.27|0.03|0.02|N|O|1997-04-07|1997-03-10|1997-04-23|TAKE BACK RETURN|AIR|e ironicall|
+2661|103|8|2|22|22068.20|0.08|0.02|N|O|1997-03-14|1997-03-17|1997-04-08|COLLECT COD|REG AIR| foxes affix quickly ironic request|
+2661|67|6|3|11|10637.66|0.00|0.08|N|O|1997-04-14|1997-02-11|1997-05-05|TAKE BACK RETURN|FOB|equests are a|
+2661|137|8|4|41|42522.33|0.06|0.02|N|O|1997-03-06|1997-03-27|1997-03-15|DELIVER IN PERSON|AIR|iously ironically ironic requests. |
+2662|102|5|1|43|43090.30|0.09|0.07|N|O|1996-11-24|1996-11-04|1996-12-08|NONE|RAIL|. slyly specia|
+2662|128|9|2|8|8224.96|0.02|0.07|N|O|1996-09-10|1996-10-09|1996-09-21|TAKE BACK RETURN|REG AIR|ajole carefully. sp|
+2662|2|5|3|6|5412.00|0.02|0.00|N|O|1996-11-30|1996-09-20|1996-12-03|DELIVER IN PERSON|REG AIR|olites cajole quickly along the b|
+2662|30|1|4|34|31621.02|0.06|0.07|N|O|1996-10-04|1996-11-05|1996-10-19|NONE|SHIP|ding theodolites use carefully. p|
+2663|114|4|1|35|35493.85|0.02|0.01|N|O|1995-12-11|1995-10-16|1996-01-07|TAKE BACK RETURN|REG AIR|tect. slyly fina|
+2688|18|5|1|45|41310.45|0.08|0.08|R|F|1992-05-21|1992-04-14|1992-05-28|NONE|FOB|sits run carefully|
+2688|15|6|2|46|42090.46|0.01|0.01|R|F|1992-05-24|1992-04-01|1992-05-26|COLLECT COD|TRUCK|elets. regular reque|
+2688|89|10|3|30|29672.40|0.05|0.04|A|F|1992-04-18|1992-03-18|1992-05-18|TAKE BACK RETURN|RAIL|ithely final |
+2688|25|10|4|3|2775.06|0.00|0.03|R|F|1992-02-04|1992-03-18|1992-02-24|DELIVER IN PERSON|RAIL|e fluffily |
+2688|59|10|5|22|21099.10|0.02|0.05|R|F|1992-02-09|1992-04-09|1992-02-11|DELIVER IN PERSON|RAIL|press, ironic excuses wake carefully id|
+2688|149|10|6|42|44063.88|0.01|0.01|R|F|1992-04-29|1992-04-04|1992-05-17|TAKE BACK RETURN|FOB|lly even account|
+2689|6|1|1|45|40770.00|0.02|0.04|R|F|1992-04-29|1992-06-22|1992-04-30|COLLECT COD|SHIP|e quickly. carefully silent|
+2690|140|1|1|44|45766.16|0.05|0.06|N|O|1996-05-30|1996-05-19|1996-06-26|NONE|REG AIR|ly alongside of th|
+2690|51|2|2|50|47552.50|0.03|0.03|N|O|1996-06-13|1996-05-22|1996-06-14|DELIVER IN PERSON|MAIL| doubt careful|
+2690|125|6|3|45|46130.40|0.02|0.07|N|O|1996-05-23|1996-06-02|1996-05-29|DELIVER IN PERSON|MAIL|ounts. slyly regular dependencies wa|
+2690|195|6|4|12|13142.28|0.04|0.07|N|O|1996-07-18|1996-06-03|1996-07-25|NONE|AIR|nal, regular atta|
+2690|86|7|5|30|29582.40|0.01|0.08|N|O|1996-05-20|1996-06-01|1996-06-04|TAKE BACK RETURN|SHIP|d accounts above the express req|
+2690|189|10|6|3|3267.54|0.07|0.01|N|O|1996-07-04|1996-05-28|1996-07-06|TAKE BACK RETURN|RAIL|. final reques|
+2690|79|7|7|35|34267.45|0.05|0.06|N|O|1996-07-25|1996-05-14|1996-08-03|COLLECT COD|FOB|y silent pinto be|
+2691|91|3|1|11|10901.99|0.04|0.07|R|F|1992-06-21|1992-06-08|1992-07-09|COLLECT COD|FOB|leep alongside of the accounts. slyly ironi|
+2691|48|7|2|2|1896.08|0.00|0.07|R|F|1992-05-10|1992-06-04|1992-05-11|TAKE BACK RETURN|TRUCK|s cajole at the blithely ironic warthog|
+2691|162|3|3|16|16994.56|0.09|0.03|R|F|1992-06-11|1992-07-29|1992-06-29|NONE|RAIL|bove the even foxes. unusual theodoli|
+2691|166|3|4|1|1066.16|0.08|0.00|A|F|1992-08-11|1992-06-07|1992-08-16|NONE|SHIP|egular instructions b|
+2692|17|1|1|3|2751.03|0.10|0.04|N|O|1998-02-25|1998-01-29|1998-03-27|TAKE BACK RETURN|MAIL|equests. bold, even foxes haggle slyl|
+2692|114|1|2|21|21296.31|0.03|0.05|N|O|1998-03-11|1998-02-11|1998-03-19|NONE|SHIP|posits. final, express requests nag furi|
+2693|9|10|1|26|23634.00|0.04|0.00|N|O|1996-09-14|1996-10-07|1996-10-03|COLLECT COD|MAIL|cajole alo|
+2693|102|3|2|43|43090.30|0.03|0.04|N|O|1996-10-24|1996-10-24|1996-11-03|TAKE BACK RETURN|TRUCK|as are according to th|
+2694|153|1|1|30|31594.50|0.02|0.06|N|O|1996-06-20|1996-06-01|1996-07-15|NONE|TRUCK|oxes. never iro|
+2694|157|2|2|35|37000.25|0.07|0.03|N|O|1996-05-24|1996-06-01|1996-05-25|NONE|RAIL|atelets past the furiously final deposits |
+2694|19|3|3|15|13785.15|0.08|0.02|N|O|1996-06-30|1996-05-01|1996-07-25|TAKE BACK RETURN|REG AIR|e blithely even platelets. special wa|
+2694|20|10|4|12|11040.24|0.00|0.05|N|O|1996-04-24|1996-04-22|1996-05-14|DELIVER IN PERSON|RAIL|foxes atop the hockey pla|
+2694|108|9|5|10|10081.00|0.08|0.08|N|O|1996-06-23|1996-05-28|1996-06-27|COLLECT COD|REG AIR|fluffily fluffy accounts. even packages hi|
+2695|184|5|1|21|22767.78|0.07|0.00|N|O|1996-10-04|1996-11-02|1996-10-21|NONE|MAIL|y regular pinto beans. evenly regular packa|
+2695|19|9|2|44|40436.44|0.09|0.07|N|O|1996-10-05|1996-10-10|1996-11-01|NONE|MAIL|ts. busy platelets boost|
+2695|144|7|3|21|21926.94|0.02|0.07|N|O|1996-09-13|1996-09-25|1996-10-13|NONE|TRUCK|s. furiously ironic platelets ar|
+2695|58|6|4|16|15328.80|0.08|0.08|N|O|1996-11-16|1996-10-05|1996-11-22|NONE|TRUCK|its. theodolites sleep slyly|
+2695|86|7|5|40|39443.20|0.02|0.03|N|O|1996-11-02|1996-10-26|1996-11-14|NONE|FOB|ructions. pending|
+2720|45|6|1|5|4725.20|0.10|0.06|A|F|1993-06-24|1993-08-08|1993-07-08|NONE|FOB|ously ironic foxes thrash|
+2720|17|8|2|42|38514.42|0.09|0.03|R|F|1993-07-25|1993-07-23|1993-08-23|COLLECT COD|REG AIR|fter the inst|
+2720|120|1|3|50|51006.00|0.10|0.02|A|F|1993-08-10|1993-07-29|1993-09-06|NONE|SHIP|l requests. deposits nag furiously|
+2720|109|2|4|49|49445.90|0.06|0.02|A|F|1993-07-09|1993-07-14|1993-07-13|NONE|REG AIR| accounts. fluffily bold pack|
+2720|121|6|5|27|27570.24|0.04|0.00|R|F|1993-06-29|1993-08-06|1993-07-28|NONE|TRUCK|eas. carefully regular |
+2721|183|4|1|49|53075.82|0.00|0.08|N|O|1996-02-14|1996-04-26|1996-03-02|DELIVER IN PERSON|AIR|ounts poach carefu|
+2721|3|4|2|2|1806.00|0.02|0.05|N|O|1996-02-13|1996-03-14|1996-02-28|TAKE BACK RETURN|TRUCK| slyly final requests against |
+2722|124|7|1|21|21506.52|0.09|0.01|A|F|1994-07-29|1994-06-26|1994-08-09|NONE|RAIL|e carefully around the furiously ironic pac|
+2722|146|7|2|15|15692.10|0.05|0.03|R|F|1994-07-02|1994-06-01|1994-07-13|COLLECT COD|AIR|refully final asympt|
+2722|34|10|3|16|14944.48|0.04|0.06|R|F|1994-05-25|1994-06-09|1994-05-26|NONE|MAIL|ts besides the fluffy,|
+2723|13|7|1|47|42911.47|0.09|0.07|N|O|1995-12-05|1995-11-19|1995-12-11|TAKE BACK RETURN|AIR|furiously r|
+2723|32|3|2|10|9320.30|0.06|0.08|N|O|1995-11-27|1995-11-29|1995-12-12|DELIVER IN PERSON|MAIL|al, special r|
+2723|162|1|3|2|2124.32|0.10|0.01|N|O|1995-11-09|1995-11-10|1995-11-14|TAKE BACK RETURN|FOB| courts boost quickly about th|
+2723|82|3|4|12|11784.96|0.01|0.05|N|O|1995-12-24|1995-11-15|1996-01-17|DELIVER IN PERSON|RAIL|bold foxes are bold packages. regular, fin|
+2723|129|10|5|40|41164.80|0.09|0.05|N|O|1995-11-17|1995-11-22|1995-11-18|TAKE BACK RETURN|MAIL|unwind fluffily carefully regular realms.|
+2724|92|4|1|47|46628.23|0.09|0.01|A|F|1994-11-23|1994-11-13|1994-12-03|COLLECT COD|TRUCK|unusual patterns nag. special p|
+2724|147|8|2|21|21989.94|0.09|0.02|A|F|1994-11-25|1994-10-15|1994-12-07|COLLECT COD|RAIL|as. carefully regular dependencies wak|
+2724|50|3|3|22|20901.10|0.04|0.06|A|F|1994-09-19|1994-11-18|1994-10-17|TAKE BACK RETURN|TRUCK|express fo|
+2724|35|6|4|1|935.03|0.07|0.03|A|F|1994-12-26|1994-11-27|1995-01-07|NONE|MAIL|lyly carefully blithe theodolites-- pl|
+2724|149|2|5|29|30425.06|0.05|0.06|A|F|1995-01-10|1994-11-17|1995-02-04|COLLECT COD|MAIL|l requests hagg|
+2725|118|2|1|23|23416.53|0.10|0.08|R|F|1994-08-25|1994-06-22|1994-08-28|TAKE BACK RETURN|REG AIR|y regular deposits. brave foxes |
+2725|5|8|2|41|37105.00|0.01|0.00|R|F|1994-07-05|1994-06-29|1994-08-02|DELIVER IN PERSON|TRUCK|ns sleep furiously c|
+2725|189|10|3|15|16337.70|0.07|0.03|R|F|1994-08-06|1994-08-09|1994-08-15|TAKE BACK RETURN|AIR|? furiously regular a|
+2726|1|6|1|50|45050.00|0.00|0.06|R|F|1993-03-04|1993-01-29|1993-03-28|COLLECT COD|TRUCK| furiously bold theodolites|
+2727|151|6|1|3|3153.45|0.03|0.01|N|O|1998-06-18|1998-06-06|1998-06-23|NONE|RAIL| the carefully regular foxes u|
+2752|31|2|1|41|38172.23|0.02|0.05|A|F|1994-03-02|1994-01-31|1994-03-06|DELIVER IN PERSON|AIR|tructions hag|
+2752|7|2|2|29|26303.00|0.02|0.04|R|F|1994-01-22|1994-01-08|1994-01-28|COLLECT COD|TRUCK|gly blithely re|
+2752|56|7|3|4|3824.20|0.08|0.00|A|F|1993-12-14|1994-02-13|1994-01-05|DELIVER IN PERSON|TRUCK|telets haggle. regular, final |
+2752|24|7|4|40|36960.80|0.09|0.06|A|F|1994-01-24|1994-01-18|1994-02-22|DELIVER IN PERSON|MAIL|into beans are after the sly|
+2752|126|5|5|22|22574.64|0.03|0.04|A|F|1994-03-20|1994-02-08|1994-04-01|TAKE BACK RETURN|TRUCK|equests nag. regular dependencies are furio|
+2752|170|5|6|21|22473.57|0.09|0.05|R|F|1994-01-01|1994-01-24|1994-01-24|COLLECT COD|SHIP| along the quickly |
+2752|199|10|7|38|41769.22|0.08|0.00|R|F|1994-02-23|1993-12-23|1994-03-24|DELIVER IN PERSON|SHIP|es boost. slyly silent ideas|
+2753|13|3|1|6|5478.06|0.10|0.04|A|F|1993-12-30|1994-01-28|1994-01-29|COLLECT COD|TRUCK|s accounts|
+2753|48|7|2|40|37921.60|0.03|0.05|A|F|1994-01-06|1994-02-13|1994-02-03|DELIVER IN PERSON|SHIP|latelets kindle slyly final depos|
+2753|89|10|3|30|29672.40|0.00|0.07|A|F|1994-01-26|1994-01-29|1994-02-02|NONE|RAIL|ans wake fluffily blithely iro|
+2753|31|7|4|7|6517.21|0.07|0.03|R|F|1994-02-11|1994-01-22|1994-03-10|DELIVER IN PERSON|AIR|xpress ideas detect b|
+2753|137|8|5|36|37336.68|0.04|0.08|R|F|1994-03-15|1994-01-03|1994-04-03|DELIVER IN PERSON|SHIP|gle slyly final c|
+2753|50|1|6|17|16150.85|0.01|0.08|A|F|1994-03-08|1994-01-17|1994-03-11|TAKE BACK RETURN|REG AIR| carefully bold deposits sublate s|
+2753|148|9|7|20|20962.80|0.01|0.06|R|F|1994-02-24|1994-02-04|1994-03-23|DELIVER IN PERSON|FOB| express pack|
+2754|149|6|1|4|4196.56|0.05|0.08|A|F|1994-07-13|1994-05-15|1994-08-02|NONE|REG AIR|blithely silent requests. regular depo|
+2754|177|5|2|19|20466.23|0.01|0.07|A|F|1994-06-27|1994-05-06|1994-06-28|NONE|FOB|latelets hag|
+2755|92|4|1|19|18849.71|0.10|0.00|R|F|1992-02-11|1992-03-15|1992-02-14|TAKE BACK RETURN|MAIL|furiously special deposits|
+2755|24|3|2|11|10164.22|0.03|0.08|A|F|1992-04-12|1992-05-07|1992-04-21|COLLECT COD|RAIL|egular excuses sleep carefully.|
+2755|64|3|3|21|20245.26|0.08|0.04|R|F|1992-02-13|1992-04-20|1992-03-02|NONE|AIR|furious re|
+2755|131|7|4|5|5155.65|0.01|0.00|A|F|1992-02-27|1992-04-07|1992-03-09|TAKE BACK RETURN|AIR|e the furi|
+2755|116|7|5|48|48773.28|0.05|0.06|R|F|1992-03-22|1992-03-10|1992-04-14|DELIVER IN PERSON|MAIL|yly even epitaphs for the |
+2756|118|9|1|35|35633.85|0.03|0.02|R|F|1994-06-08|1994-06-01|1994-06-21|TAKE BACK RETURN|AIR| deposits grow bold sheaves; iro|
+2756|80|9|2|47|46063.76|0.06|0.01|R|F|1994-05-10|1994-05-25|1994-05-13|NONE|AIR|e final, f|
+2756|105|8|3|31|31158.10|0.01|0.07|A|F|1994-07-27|1994-07-06|1994-08-22|TAKE BACK RETURN|TRUCK|en instructions use quickly.|
+2756|72|2|4|30|29162.10|0.00|0.04|A|F|1994-06-05|1994-06-30|1994-06-14|DELIVER IN PERSON|TRUCK|ular packages. regular deposi|
+2757|148|5|1|26|27251.64|0.07|0.00|N|O|1995-08-19|1995-10-02|1995-09-06|DELIVER IN PERSON|MAIL|around the blithely|
+2757|22|7|2|12|11064.24|0.07|0.08|N|O|1995-08-01|1995-09-04|1995-08-08|TAKE BACK RETURN|SHIP| regular, eve|
+2757|73|3|3|17|16542.19|0.10|0.04|N|O|1995-09-06|1995-09-27|1995-09-22|DELIVER IN PERSON|AIR|er the furiously silent |
+2757|140|1|4|25|26003.50|0.08|0.01|N|O|1995-11-09|1995-09-12|1995-11-23|NONE|AIR|uickly regular |
+2757|70|7|5|14|13580.98|0.04|0.05|N|O|1995-09-01|1995-08-24|1995-09-03|TAKE BACK RETURN|SHIP|special deposits u|
+2758|121|10|1|20|20422.40|0.02|0.04|N|O|1998-07-27|1998-09-10|1998-08-21|TAKE BACK RETURN|AIR|ptotes sleep furiously|
+2758|23|8|2|17|15691.34|0.10|0.06|N|O|1998-09-25|1998-10-03|1998-10-25|NONE|MAIL| accounts! qui|
+2758|26|5|3|1|926.02|0.06|0.02|N|O|1998-10-09|1998-09-15|1998-10-16|NONE|TRUCK|ake furious|
+2759|59|1|1|10|9590.50|0.10|0.03|R|F|1993-12-14|1994-01-08|1994-01-01|COLLECT COD|FOB|s. busily ironic theodo|
+2759|113|10|2|37|37485.07|0.00|0.06|R|F|1994-03-05|1994-02-22|1994-03-18|DELIVER IN PERSON|REG AIR|lar Tiresias affix ironically carefully sp|
+2759|112|9|3|11|11133.21|0.03|0.08|A|F|1994-01-24|1994-01-16|1994-02-21|DELIVER IN PERSON|TRUCK|hely regular |
+2759|23|2|4|31|28613.62|0.02|0.05|A|F|1994-01-11|1994-01-15|1994-01-23|NONE|SHIP|ithely aft|
+2784|33|4|1|45|41986.35|0.03|0.01|N|O|1998-02-15|1998-04-07|1998-02-26|COLLECT COD|AIR|yly along the asymptotes. reque|
+2784|54|5|2|23|21943.15|0.03|0.05|N|O|1998-03-28|1998-02-07|1998-04-17|DELIVER IN PERSON|AIR|uests lose after |
+2784|175|4|3|40|43006.80|0.07|0.01|N|O|1998-04-28|1998-03-19|1998-05-03|DELIVER IN PERSON|TRUCK|deas nag furiously never unusual |
+2784|29|10|4|3|2787.06|0.04|0.03|N|O|1998-01-19|1998-04-05|1998-02-05|TAKE BACK RETURN|AIR|n packages. foxes haggle quickly sile|
+2785|100|3|1|34|34003.40|0.08|0.06|N|O|1995-08-07|1995-09-09|1995-09-05|NONE|RAIL|ly final packages haggl|
+2785|110|7|2|37|37374.07|0.08|0.04|N|O|1995-07-25|1995-09-12|1995-08-06|DELIVER IN PERSON|TRUCK|tructions. furiously |
+2785|65|10|3|33|31846.98|0.08|0.06|N|O|1995-10-16|1995-08-24|1995-11-02|DELIVER IN PERSON|MAIL|fter the furiously final p|
+2785|48|1|4|34|32233.36|0.00|0.02|N|O|1995-09-16|1995-09-09|1995-10-11|COLLECT COD|SHIP|kages wake carefully silent |
+2786|136|2|1|15|15541.95|0.03|0.04|A|F|1992-05-19|1992-05-08|1992-05-28|COLLECT COD|TRUCK|low deposits are ironic|
+2786|51|3|2|42|39944.10|0.10|0.04|R|F|1992-05-15|1992-04-22|1992-05-30|DELIVER IN PERSON|AIR|unts are against the furious|
+2786|156|1|3|41|43302.15|0.04|0.05|R|F|1992-07-01|1992-06-04|1992-07-13|COLLECT COD|RAIL|ix requests. bold requests a|
+2786|23|4|4|24|22152.48|0.05|0.02|A|F|1992-04-04|1992-06-09|1992-05-02|DELIVER IN PERSON|MAIL|ans. slyly unusual platelets detect. unus|
+2786|50|3|5|43|40852.15|0.06|0.03|R|F|1992-04-22|1992-05-13|1992-04-29|NONE|RAIL|ons. theodolites after|
+2786|162|1|6|21|22305.36|0.08|0.00|A|F|1992-05-03|1992-05-01|1992-05-14|COLLECT COD|AIR|slow instructi|
+2787|33|9|1|4|3732.12|0.04|0.04|N|O|1996-01-26|1995-11-26|1996-02-20|TAKE BACK RETURN|SHIP|ts. instructions nag furiously according |
+2788|177|8|1|16|17234.72|0.06|0.06|A|F|1994-10-04|1994-11-25|1994-10-18|DELIVER IN PERSON|AIR| requests wake carefully. carefully si|
+2789|163|8|1|16|17010.56|0.03|0.02|N|O|1998-04-18|1998-05-25|1998-05-12|DELIVER IN PERSON|REG AIR|o beans use carefully|
+2789|23|4|2|41|37843.82|0.02|0.05|N|O|1998-03-20|1998-05-15|1998-03-21|COLLECT COD|MAIL|d packages-- fluffily specia|
+2789|176|5|3|33|35513.61|0.06|0.02|N|O|1998-04-21|1998-05-02|1998-04-30|COLLECT COD|TRUCK|deposits. ironic |
+2789|16|3|4|47|43052.47|0.02|0.04|N|O|1998-03-29|1998-05-05|1998-04-07|NONE|RAIL|usly busy packages wake against the unusual|
+2789|197|1|5|23|25235.37|0.02|0.07|N|O|1998-03-25|1998-05-10|1998-04-24|COLLECT COD|RAIL|cording to the careful de|
+2789|144|5|6|16|16706.24|0.07|0.03|N|O|1998-05-11|1998-05-08|1998-05-24|TAKE BACK RETURN|RAIL|d the carefully iron|
+2789|133|4|7|42|43391.46|0.01|0.00|N|O|1998-04-28|1998-05-17|1998-05-24|TAKE BACK RETURN|AIR|ending packages shoul|
+2790|185|6|1|27|29299.86|0.06|0.08|R|F|1994-09-04|1994-09-27|1994-09-16|TAKE BACK RETURN|MAIL|ilent packages cajole. quickly ironic requ|
+2790|117|1|2|50|50855.50|0.00|0.06|A|F|1994-12-08|1994-11-17|1994-12-19|NONE|RAIL|fter the regular ideas. f|
+2790|184|5|3|19|20599.42|0.06|0.00|R|F|1994-10-23|1994-10-03|1994-10-26|TAKE BACK RETURN|RAIL|uffily even excuses. furiously thin|
+2790|197|8|4|24|26332.56|0.07|0.01|A|F|1994-12-04|1994-10-10|1994-12-25|NONE|MAIL|ments. slyly f|
+2790|148|9|5|11|11529.54|0.08|0.03|A|F|1994-09-28|1994-11-14|1994-10-04|TAKE BACK RETURN|AIR|lar requests poach slyly foxes|
+2790|73|3|6|13|12649.91|0.08|0.00|R|F|1994-09-20|1994-10-10|1994-10-20|COLLECT COD|SHIP|n deposits according to the regul|
+2790|4|1|7|32|28928.00|0.08|0.02|A|F|1994-09-25|1994-10-26|1994-10-01|NONE|SHIP|ully pending|
+2791|59|10|1|49|46993.45|0.10|0.04|A|F|1995-01-11|1994-11-10|1995-02-08|COLLECT COD|MAIL| accounts sleep at the bold, regular pinto |
+2791|63|4|2|4|3852.24|0.10|0.08|A|F|1995-01-02|1994-12-28|1995-01-29|NONE|SHIP|slyly bold packages boost. slyly|
+2791|133|9|3|44|45457.72|0.08|0.06|R|F|1994-11-17|1994-11-12|1994-12-14|NONE|FOB|heodolites use furio|
+2791|156|8|4|24|25347.60|0.04|0.02|R|F|1995-01-30|1994-11-20|1995-02-08|DELIVER IN PERSON|TRUCK|ilent forges. quickly special pinto beans |
+2791|105|2|5|8|8040.80|0.02|0.04|R|F|1995-01-30|1994-11-24|1995-02-13|NONE|FOB|se. close ideas alongs|
+2791|75|3|6|9|8775.63|0.08|0.02|R|F|1994-11-19|1994-12-14|1994-12-10|TAKE BACK RETURN|AIR|pendencies. blithely bold patterns acr|
+2791|29|2|7|26|24154.52|0.06|0.03|R|F|1995-02-06|1994-12-07|1995-02-23|DELIVER IN PERSON|AIR|uriously special instructio|
+2816|59|10|1|33|31648.65|0.00|0.07|R|F|1994-10-19|1994-11-10|1994-11-09|NONE|REG AIR|s; slyly even theodo|
+2816|142|3|2|4|4168.56|0.05|0.04|R|F|1994-12-11|1994-12-07|1995-01-03|NONE|FOB|. blithely pending id|
+2816|121|6|3|4|4084.48|0.02|0.06|R|F|1994-12-12|1994-12-05|1994-12-30|NONE|RAIL| requests print above the final deposits|
+2817|60|8|1|25|24001.50|0.07|0.01|R|F|1994-04-21|1994-06-20|1994-05-07|DELIVER IN PERSON|FOB|doze blithely.|
+2817|32|8|2|5|4660.15|0.03|0.04|A|F|1994-05-07|1994-05-31|1994-05-12|TAKE BACK RETURN|AIR|furiously unusual theodolites use furiou|
+2817|172|10|3|35|37525.95|0.01|0.07|A|F|1994-05-20|1994-06-03|1994-05-22|COLLECT COD|FOB|gular foxes|
+2817|161|2|4|4|4244.64|0.00|0.05|R|F|1994-06-04|1994-06-11|1994-06-10|NONE|TRUCK|n accounts wake across the fluf|
+2818|121|4|1|12|12253.44|0.10|0.03|A|F|1995-02-01|1995-03-10|1995-02-16|NONE|AIR|lms. quickly bold asymp|
+2818|199|2|2|22|24182.18|0.06|0.07|R|F|1995-02-28|1995-03-10|1995-03-06|TAKE BACK RETURN|RAIL|egrate toward the carefully iron|
+2818|45|6|3|11|10395.44|0.01|0.06|R|F|1995-02-18|1995-02-11|1995-03-19|TAKE BACK RETURN|TRUCK|ggle across the carefully blithe|
+2818|40|6|4|32|30081.28|0.08|0.08|R|F|1995-02-04|1995-03-05|1995-02-18|COLLECT COD|REG AIR|arefully! ac|
+2818|18|8|5|42|38556.42|0.08|0.04|A|F|1995-02-12|1995-02-19|1995-03-13|COLLECT COD|MAIL|ar accounts wake carefully a|
+2818|91|5|6|7|6937.63|0.06|0.03|R|F|1995-03-24|1995-03-09|1995-04-06|TAKE BACK RETURN|TRUCK|ly according to the r|
+2819|70|1|1|17|16491.19|0.08|0.08|A|F|1994-07-16|1994-07-15|1994-07-17|TAKE BACK RETURN|RAIL|en deposits above the f|
+2819|67|2|2|12|11604.72|0.03|0.08|R|F|1994-07-18|1994-06-24|1994-07-28|NONE|MAIL| regular, regular a|
+2819|5|2|3|28|25340.00|0.03|0.08|R|F|1994-05-09|1994-07-02|1994-05-15|NONE|RAIL|ckages sublate carefully closely regular |
+2819|153|4|4|5|5265.75|0.00|0.02|R|F|1994-05-29|1994-06-12|1994-06-28|NONE|TRUCK| fluffily unusual foxes sleep caref|
+2819|200|3|5|6|6601.20|0.03|0.01|A|F|1994-07-22|1994-08-02|1994-07-29|NONE|REG AIR|eas after the carefully express pack|
+2820|174|2|1|23|24705.91|0.04|0.08|R|F|1994-07-10|1994-08-08|1994-07-21|NONE|MAIL| was furiously. deposits among the ironic|
+2820|126|9|2|33|33861.96|0.08|0.06|A|F|1994-07-07|1994-08-17|1994-08-02|DELIVER IN PERSON|AIR|carefully even pinto beans. |
+2820|141|10|3|38|39563.32|0.03|0.08|A|F|1994-09-10|1994-08-07|1994-10-07|TAKE BACK RETURN|MAIL|ests despite the carefully unusual a|
+2820|197|9|4|40|43887.60|0.06|0.06|A|F|1994-08-08|1994-07-30|1994-08-21|TAKE BACK RETURN|REG AIR|g multipliers. final c|
+2821|181|2|1|4|4324.72|0.00|0.00|A|F|1993-09-15|1993-10-02|1993-09-17|TAKE BACK RETURN|TRUCK|nding foxes.|
+2821|72|1|2|4|3888.28|0.09|0.00|A|F|1993-11-19|1993-09-20|1993-11-27|TAKE BACK RETURN|TRUCK|ual multipliers. final deposits cajol|
+2821|164|1|3|27|28732.32|0.01|0.01|A|F|1993-11-27|1993-10-11|1993-12-08|COLLECT COD|TRUCK|requests. blit|
+2822|151|9|1|39|40994.85|0.04|0.02|R|F|1993-09-11|1993-08-29|1993-09-18|NONE|MAIL|kly about the sly|
+2823|86|7|1|45|44373.60|0.03|0.04|N|O|1995-12-28|1995-11-27|1996-01-02|DELIVER IN PERSON|SHIP|furiously special idea|
+2823|160|5|2|18|19082.88|0.00|0.03|N|O|1995-11-11|1995-10-30|1995-12-08|TAKE BACK RETURN|TRUCK| final deposits. furiously regular foxes u|
+2823|186|7|3|11|11947.98|0.07|0.02|N|O|1995-12-10|1995-11-24|1995-12-21|DELIVER IN PERSON|SHIP|bold requests nag blithely s|
+2823|139|10|4|48|49878.24|0.09|0.03|N|O|1995-11-21|1995-10-30|1995-11-27|NONE|SHIP|ously busily slow excus|
+2823|99|2|5|18|17983.62|0.04|0.06|N|O|1995-11-09|1995-10-30|1995-11-19|NONE|AIR|eas. decoys cajole deposi|
+2823|123|2|6|20|20462.40|0.07|0.00|N|O|1995-11-13|1995-12-06|1995-12-07|NONE|MAIL|its sleep between the unusual, ironic pac|
+2823|86|7|7|12|11832.96|0.02|0.04|N|O|1995-12-22|1995-11-20|1996-01-13|NONE|REG AIR|the slyly ironic dolphins; fin|
+2848|65|4|1|44|42462.64|0.01|0.05|R|F|1992-04-14|1992-05-09|1992-04-19|DELIVER IN PERSON|MAIL|ions. slyly express instructions n|
+2848|165|6|2|8|8521.28|0.07|0.01|A|F|1992-03-21|1992-05-18|1992-04-07|DELIVER IN PERSON|TRUCK|. silent, final ideas sublate packages. ir|
+2848|138|4|3|8|8305.04|0.07|0.08|A|F|1992-06-20|1992-04-12|1992-07-09|NONE|SHIP|sly regular foxes. |
+2848|125|6|4|34|34854.08|0.02|0.08|A|F|1992-03-15|1992-04-24|1992-04-12|TAKE BACK RETURN|RAIL|ts along the blithely regu|
+2848|195|7|5|18|19713.42|0.07|0.03|R|F|1992-04-10|1992-06-01|1992-05-05|DELIVER IN PERSON|TRUCK|osits haggle. stealthily ironic packa|
+2849|154|2|1|16|16866.40|0.09|0.08|N|O|1996-05-20|1996-07-23|1996-06-18|NONE|TRUCK|. furiously regular requ|
+2849|187|8|2|39|42400.02|0.10|0.03|N|O|1996-05-22|1996-07-18|1996-06-05|TAKE BACK RETURN|SHIP|s sleep furiously silently regul|
+2849|60|1|3|24|23041.44|0.01|0.05|N|O|1996-06-12|1996-07-10|1996-06-27|TAKE BACK RETURN|AIR|e slyly even asymptotes. slo|
+2849|55|7|4|48|45842.40|0.05|0.02|N|O|1996-05-03|1996-06-05|1996-05-28|NONE|AIR|mong the carefully regular theodol|
+2849|28|7|5|30|27840.60|0.10|0.06|N|O|1996-08-24|1996-07-08|1996-09-03|TAKE BACK RETURN|SHIP|ly. carefully silent|
+2849|69|4|6|30|29071.80|0.06|0.07|N|O|1996-06-20|1996-07-23|1996-07-06|NONE|FOB|yly furiously even id|
+2850|97|1|1|43|42874.87|0.02|0.05|N|O|1997-01-11|1996-11-03|1997-02-01|COLLECT COD|REG AIR|unusual accounts|
+2850|110|7|2|30|30303.30|0.09|0.01|N|O|1996-12-14|1996-11-29|1997-01-03|COLLECT COD|AIR|even ideas. busy pinto beans sleep above t|
+2850|105|6|3|49|49249.90|0.09|0.04|N|O|1996-10-07|1996-12-12|1996-10-12|TAKE BACK RETURN|MAIL| slyly unusual req|
+2850|199|3|4|4|4396.76|0.04|0.04|N|O|1996-10-28|1996-12-26|1996-11-07|COLLECT COD|RAIL|al deposits cajole carefully quickly |
+2851|148|5|1|8|8385.12|0.09|0.03|N|O|1997-11-12|1997-11-22|1997-12-11|NONE|REG AIR|y special theodolites. carefully|
+2852|177|6|1|6|6463.02|0.01|0.01|R|F|1993-03-02|1993-04-11|1993-03-11|TAKE BACK RETURN|RAIL| accounts above the furiously un|
+2852|41|10|2|24|22584.96|0.05|0.07|R|F|1993-01-18|1993-03-13|1993-02-14|DELIVER IN PERSON|MAIL| the blithe|
+2852|164|9|3|29|30860.64|0.09|0.05|R|F|1993-04-21|1993-03-22|1993-05-02|COLLECT COD|SHIP|lyly ironi|
+2852|100|3|4|12|12001.20|0.08|0.02|A|F|1993-02-25|1993-03-24|1993-03-07|TAKE BACK RETURN|TRUCK|le. request|
+2852|154|2|5|28|29516.20|0.05|0.03|R|F|1993-02-08|1993-03-30|1993-02-11|NONE|MAIL|e accounts. caref|
+2853|139|5|1|14|14547.82|0.07|0.05|R|F|1994-05-16|1994-07-01|1994-05-27|NONE|TRUCK|oach slyly along t|
+2853|134|10|2|26|26887.38|0.06|0.01|R|F|1994-06-26|1994-06-05|1994-07-02|TAKE BACK RETURN|MAIL|dolphins wake slyly. blith|
+2853|173|3|3|40|42926.80|0.06|0.04|A|F|1994-08-06|1994-06-24|1994-08-29|NONE|RAIL|lyly. pearls cajole. final accounts ca|
+2853|132|8|4|20|20642.60|0.02|0.04|A|F|1994-08-30|1994-06-16|1994-09-06|TAKE BACK RETURN|TRUCK|e slyly silent foxes. express deposits sno|
+2853|36|7|5|1|936.03|0.08|0.05|R|F|1994-09-01|1994-06-27|1994-09-12|TAKE BACK RETURN|FOB|refully slyly quick packages. final c|
+2854|181|2|1|46|49734.28|0.00|0.04|A|F|1994-09-22|1994-08-02|1994-09-30|COLLECT COD|AIR|. furiously regular deposits across th|
+2854|88|9|2|29|28654.32|0.09|0.07|R|F|1994-07-06|1994-08-26|1994-07-09|COLLECT COD|SHIP|y slyly ironic accounts. foxes haggle slyl|
+2854|160|8|3|20|21203.20|0.08|0.01|R|F|1994-09-18|1994-08-03|1994-10-12|COLLECT COD|AIR|rs impress after the deposits. |
+2854|170|1|4|34|36385.78|0.06|0.03|A|F|1994-09-06|1994-08-07|1994-09-22|NONE|REG AIR|age carefully|
+2854|102|3|5|7|7014.70|0.03|0.06|A|F|1994-09-23|1994-08-14|1994-10-10|DELIVER IN PERSON|REG AIR| the pending|
+2854|18|2|6|13|11934.13|0.04|0.03|R|F|1994-09-15|1994-08-18|1994-09-19|DELIVER IN PERSON|SHIP| excuses wak|
+2855|33|4|1|50|46651.50|0.03|0.07|A|F|1993-05-20|1993-06-28|1993-06-16|TAKE BACK RETURN|TRUCK|beans. deposits |
+2880|35|6|1|40|37401.20|0.09|0.00|A|F|1992-05-26|1992-06-01|1992-05-31|COLLECT COD|TRUCK|even requests. quick|
+2880|139|5|2|26|27017.38|0.07|0.07|R|F|1992-04-12|1992-04-15|1992-04-28|NONE|RAIL|ully among the regular warthogs|
+2880|115|9|3|42|42634.62|0.01|0.01|R|F|1992-06-17|1992-05-29|1992-07-11|NONE|REG AIR|ions. carefully final accounts are unusual,|
+2880|18|2|4|46|42228.46|0.02|0.02|A|F|1992-04-21|1992-06-05|1992-05-16|COLLECT COD|RAIL|eep quickly according to t|
+2881|180|10|1|16|17282.88|0.02|0.06|A|F|1992-06-21|1992-06-27|1992-07-03|TAKE BACK RETURN|TRUCK|usly bold |
+2881|10|1|2|1|910.01|0.09|0.03|A|F|1992-05-13|1992-07-21|1992-05-18|COLLECT COD|MAIL|final theodolites. quickly|
+2881|93|6|3|21|20854.89|0.07|0.03|A|F|1992-05-28|1992-07-03|1992-06-02|TAKE BACK RETURN|SHIP|hely express Tiresias. final dependencies |
+2881|140|6|4|7|7280.98|0.06|0.01|R|F|1992-08-03|1992-07-10|1992-08-27|NONE|REG AIR|ironic packages are carefully final ac|
+2882|4|7|1|14|12656.00|0.09|0.02|N|O|1995-09-28|1995-11-11|1995-10-18|TAKE BACK RETURN|MAIL|kly. even requests w|
+2882|42|1|2|30|28261.20|0.00|0.00|N|O|1995-10-15|1995-10-13|1995-10-25|NONE|REG AIR|among the furiously even theodolites. regu|
+2882|197|9|3|29|31818.51|0.10|0.08|N|O|1995-09-10|1995-11-01|1995-10-02|NONE|TRUCK|kages. furiously ironic|
+2882|78|6|4|27|26407.89|0.06|0.02|N|O|1995-09-04|1995-11-11|1995-09-12|DELIVER IN PERSON|MAIL|rding to the regu|
+2882|134|5|5|32|33092.16|0.07|0.03|N|O|1995-10-21|1995-11-10|1995-11-01|COLLECT COD|RAIL|sts. quickly regular e|
+2882|87|8|6|47|46392.76|0.06|0.03|N|O|1995-09-13|1995-09-21|1995-09-14|NONE|REG AIR|l, special|
+2883|1|4|1|33|29733.00|0.08|0.07|R|F|1995-02-26|1995-03-04|1995-03-01|NONE|RAIL|s. final i|
+2883|125|6|2|27|27678.24|0.00|0.02|A|F|1995-03-12|1995-03-10|1995-04-04|TAKE BACK RETURN|REG AIR|s. brave pinto beans nag furiously|
+2883|189|10|3|47|51191.46|0.05|0.04|R|F|1995-01-29|1995-04-19|1995-02-05|DELIVER IN PERSON|SHIP|ep carefully ironic|
+2883|98|2|4|23|22956.07|0.00|0.02|R|F|1995-02-03|1995-03-17|1995-02-19|TAKE BACK RETURN|AIR| even requests cajole. special, regular |
+2883|195|8|5|36|39426.84|0.07|0.06|A|F|1995-05-02|1995-03-14|1995-05-30|COLLECT COD|MAIL|ests detect slyly special packages|
+2884|71|2|1|41|39813.87|0.03|0.00|N|O|1998-01-02|1997-12-17|1998-01-20|DELIVER IN PERSON|TRUCK|ep. slyly even accounts a|
+2884|146|5|2|25|26153.50|0.09|0.08|N|O|1998-01-18|1997-12-06|1998-02-16|TAKE BACK RETURN|MAIL|onic theodolites with the instructi|
+2884|26|7|3|8|7408.16|0.08|0.08|N|O|1997-11-30|1997-11-28|1997-12-14|COLLECT COD|TRUCK|pending accounts about |
+2885|4|9|1|6|5424.00|0.10|0.01|A|F|1993-01-05|1992-12-12|1993-01-19|COLLECT COD|FOB|ctions solve. slyly regular requests n|
+2885|112|3|2|4|4048.44|0.07|0.00|A|F|1992-10-09|1992-12-17|1992-11-04|TAKE BACK RETURN|SHIP| pending packages wake. |
+2885|1|6|3|45|40545.00|0.10|0.04|A|F|1992-12-24|1992-10-30|1993-01-04|NONE|SHIP|ess ideas. regular, silen|
+2885|32|3|4|15|13980.45|0.03|0.04|R|F|1992-10-31|1992-11-24|1992-11-21|DELIVER IN PERSON|MAIL|odolites. boldly pending packages han|
+2885|175|5|5|43|46232.31|0.06|0.00|R|F|1992-11-17|1992-10-30|1992-12-04|DELIVER IN PERSON|SHIP|cial deposits use bold|
+2885|190|1|6|5|5450.95|0.01|0.02|R|F|1993-01-06|1992-11-13|1993-02-05|TAKE BACK RETURN|TRUCK|s. slyly express th|
+2885|50|9|7|40|38002.00|0.05|0.03|A|F|1992-09-23|1992-11-15|1992-10-07|TAKE BACK RETURN|AIR| express depos|
+2886|60|1|1|1|960.06|0.09|0.05|A|F|1995-02-01|1994-12-18|1995-02-28|COLLECT COD|REG AIR|eposits fr|
+2886|184|5|2|38|41198.84|0.02|0.04|A|F|1995-01-21|1995-01-08|1995-01-30|NONE|SHIP|old requests along the fur|
+2886|63|8|3|2|1926.12|0.04|0.07|A|F|1994-11-18|1995-01-31|1994-12-05|COLLECT COD|REG AIR|ar theodolites. e|
+2886|130|3|4|46|47385.98|0.03|0.08|A|F|1995-02-02|1995-01-26|1995-02-15|TAKE BACK RETURN|SHIP|ously final packages sleep blithely regular|
+2887|66|3|1|11|10626.66|0.06|0.00|N|O|1997-07-08|1997-07-17|1997-07-15|COLLECT COD|SHIP|ackages. unusual, speci|
+2887|112|6|2|17|17205.87|0.00|0.08|N|O|1997-08-31|1997-07-04|1997-09-17|DELIVER IN PERSON|SHIP|fily final packages. regula|
+2912|122|1|1|8|8176.96|0.06|0.04|A|F|1992-04-09|1992-04-19|1992-04-26|NONE|RAIL|hs cajole over the slyl|
+2912|115|9|2|18|18271.98|0.00|0.08|R|F|1992-03-13|1992-04-19|1992-03-30|TAKE BACK RETURN|RAIL|unts cajole reg|
+2913|123|6|1|39|39901.68|0.06|0.04|N|O|1997-08-28|1997-09-27|1997-09-02|TAKE BACK RETURN|AIR|. final packages a|
+2913|22|5|2|22|20284.44|0.10|0.07|N|O|1997-09-18|1997-08-11|1997-10-02|COLLECT COD|MAIL|riously pending realms. blithely even pac|
+2913|166|1|3|17|18124.72|0.07|0.04|N|O|1997-10-21|1997-09-25|1997-11-20|NONE|FOB|requests doze quickly. furious|
+2913|143|4|4|5|5215.70|0.10|0.07|N|O|1997-10-07|1997-08-25|1997-10-09|TAKE BACK RETURN|RAIL|haggle. even, bold instructi|
+2913|15|9|5|13|11895.13|0.03|0.01|N|O|1997-10-02|1997-08-20|1997-10-26|COLLECT COD|MAIL|inos are carefully alongside of the bol|
+2913|168|5|6|35|37385.60|0.06|0.08|N|O|1997-08-30|1997-08-21|1997-09-03|COLLECT COD|MAIL|es. quickly even braids against|
+2914|66|7|1|22|21253.32|0.05|0.06|R|F|1993-05-11|1993-04-09|1993-05-22|DELIVER IN PERSON|FOB| carefully about the fluffily ironic gifts|
+2914|163|10|2|25|26579.00|0.03|0.04|A|F|1993-05-14|1993-04-04|1993-05-22|NONE|SHIP|cross the carefully even accounts.|
+2914|35|1|3|4|3740.12|0.00|0.05|R|F|1993-06-11|1993-04-09|1993-06-14|TAKE BACK RETURN|SHIP|s integrate. bold deposits sleep req|
+2914|121|2|4|9|9190.08|0.06|0.01|R|F|1993-06-17|1993-05-26|1993-06-19|NONE|REG AIR|s. carefully final foxes ar|
+2915|175|5|1|28|30104.76|0.10|0.02|R|F|1994-04-17|1994-06-09|1994-05-10|NONE|MAIL|yly special |
+2915|94|7|2|12|11929.08|0.00|0.03|A|F|1994-07-18|1994-06-11|1994-07-27|TAKE BACK RETURN|RAIL|accounts. slyly final|
+2915|136|2|3|15|15541.95|0.07|0.00|A|F|1994-05-01|1994-06-12|1994-05-15|DELIVER IN PERSON|TRUCK|al requests haggle furiousl|
+2915|81|2|4|43|42186.44|0.06|0.05|R|F|1994-06-02|1994-05-24|1994-06-06|DELIVER IN PERSON|SHIP|into beans dazzle alongside of|
+2916|83|4|1|21|20644.68|0.06|0.04|N|O|1996-03-11|1996-02-21|1996-03-30|NONE|REG AIR|uickly express ideas over the slyly even |
+2917|93|4|1|36|35751.24|0.10|0.01|N|O|1998-04-07|1998-02-23|1998-05-01|DELIVER IN PERSON|RAIL|usly ironic d|
+2917|21|2|2|20|18420.40|0.06|0.03|N|O|1997-12-31|1998-01-22|1998-01-12|NONE|MAIL|slyly even ideas wa|
+2917|90|1|3|4|3960.36|0.02|0.07|N|O|1998-01-10|1998-01-18|1998-02-08|TAKE BACK RETURN|REG AIR|s. unusual instruct|
+2917|167|2|4|5|5335.80|0.05|0.01|N|O|1997-12-16|1998-01-26|1998-01-07|NONE|RAIL|bove the furiously silent packages. pend|
+2917|41|10|5|37|34818.48|0.04|0.01|N|O|1997-12-12|1998-02-03|1997-12-23|COLLECT COD|RAIL|dependencies. express |
+2917|194|8|6|7|7659.33|0.05|0.01|N|O|1998-03-21|1998-03-03|1998-03-25|NONE|REG AIR|ly about the regular accounts. carefully pe|
+2918|78|7|1|24|23473.68|0.10|0.03|N|O|1996-12-20|1996-10-28|1996-12-26|DELIVER IN PERSON|FOB| quickly. express requests haggle careful|
+2919|102|5|1|2|2004.20|0.03|0.05|R|F|1993-12-28|1994-02-23|1994-01-18|COLLECT COD|TRUCK|re slyly. regular ideas detect furiousl|
+2919|121|4|2|49|50034.88|0.07|0.02|R|F|1993-12-16|1994-02-28|1993-12-19|COLLECT COD|FOB|hely final inst|
+2919|46|5|3|44|41625.76|0.07|0.07|A|F|1994-04-01|1994-01-12|1994-04-07|TAKE BACK RETURN|TRUCK|final ideas haggle carefully fluff|
+2919|102|5|4|44|44092.40|0.00|0.05|R|F|1994-02-04|1994-02-03|1994-03-02|TAKE BACK RETURN|AIR|es doze around the furiously |
+2944|120|1|1|44|44885.28|0.08|0.05|N|O|1997-12-25|1997-10-28|1998-01-21|COLLECT COD|AIR|ickly special theodolit|
+2944|42|9|2|44|41449.76|0.06|0.02|N|O|1997-10-28|1997-11-22|1997-11-10|NONE|SHIP|ickly. regular requests haggle. idea|
+2944|170|5|3|2|2140.34|0.06|0.07|N|O|1997-12-13|1997-12-01|1998-01-08|DELIVER IN PERSON|REG AIR|luffily expr|
+2944|17|7|4|23|21091.23|0.02|0.03|N|O|1998-01-12|1997-12-03|1998-01-17|TAKE BACK RETURN|MAIL| excuses? regular platelets e|
+2944|75|4|5|18|17551.26|0.10|0.01|N|O|1998-01-07|1997-10-26|1998-01-27|TAKE BACK RETURN|FOB| furiously slyl|
+2944|60|2|6|17|16321.02|0.00|0.03|N|O|1997-10-18|1997-11-27|1997-10-29|TAKE BACK RETURN|SHIP|slyly final dolphins sleep silent the|
+2944|90|1|7|7|6930.63|0.01|0.06|N|O|1997-10-30|1997-11-03|1997-11-03|DELIVER IN PERSON|FOB|fluffily blithely express pea|
+2945|59|10|1|37|35484.85|0.00|0.02|N|O|1996-02-10|1996-03-20|1996-02-12|COLLECT COD|SHIP|l instructions. regular, regular |
+2945|72|2|2|30|29162.10|0.05|0.01|N|O|1996-01-19|1996-02-11|1996-01-26|NONE|TRUCK|ular instructions|
+2945|127|8|3|28|28759.36|0.06|0.02|N|O|1996-03-17|1996-03-13|1996-04-15|COLLECT COD|FOB|le slyly along the eve|
+2945|188|9|4|34|36998.12|0.08|0.06|N|O|1996-02-03|1996-03-17|1996-02-29|COLLECT COD|REG AIR|at the unusual theodolite|
+2945|173|1|5|10|10731.70|0.09|0.05|N|O|1996-03-13|1996-03-10|1996-04-06|COLLECT COD|FOB|thely. final courts could hang qu|
+2945|97|9|6|45|44869.05|0.07|0.00|N|O|1996-03-01|1996-03-25|1996-03-08|TAKE BACK RETURN|MAIL|ainst the final packages|
+2945|52|10|7|47|44746.35|0.07|0.05|N|O|1996-01-05|1996-02-11|1996-01-12|DELIVER IN PERSON|MAIL|quests use|
+2946|10|5|1|25|22750.25|0.05|0.02|N|O|1996-05-06|1996-04-23|1996-05-16|DELIVER IN PERSON|SHIP|ic deposits. furiously|
+2946|94|5|2|48|47716.32|0.03|0.07|N|O|1996-06-02|1996-03-31|1996-06-16|COLLECT COD|TRUCK|oss the platelets. furi|
+2946|3|6|3|35|31605.00|0.03|0.00|N|O|1996-03-15|1996-04-02|1996-03-26|NONE|REG AIR| sublate along the fluffily iron|
+2947|10|1|1|37|33670.37|0.09|0.07|N|O|1995-08-09|1995-07-05|1995-08-20|DELIVER IN PERSON|RAIL|e accounts: expres|
+2947|186|7|2|10|10861.80|0.09|0.07|A|F|1995-06-07|1995-06-26|1995-06-08|NONE|MAIL|lly special |
+2948|118|9|1|48|48869.28|0.00|0.04|R|F|1994-08-29|1994-10-23|1994-09-23|NONE|TRUCK|unusual excuses use about the |
+2948|92|3|2|49|48612.41|0.04|0.07|R|F|1994-12-16|1994-11-08|1995-01-07|DELIVER IN PERSON|MAIL|ress requests. furiously blithe foxes |
+2949|21|6|1|4|3684.08|0.06|0.06|A|F|1994-06-07|1994-06-17|1994-07-04|TAKE BACK RETURN|REG AIR|gular pinto beans wake alongside of the reg|
+2949|70|5|2|50|48503.50|0.05|0.04|A|F|1994-08-04|1994-06-23|1994-08-17|TAKE BACK RETURN|FOB|gular courts cajole across t|
+2949|180|9|3|38|41046.84|0.02|0.06|R|F|1994-05-22|1994-05-25|1994-05-27|COLLECT COD|REG AIR|se slyly requests. carefull|
+2950|130|1|1|32|32964.16|0.01|0.05|N|O|1997-09-21|1997-08-25|1997-10-08|DELIVER IN PERSON|REG AIR|its wake carefully slyly final ideas.|
+2950|66|7|2|18|17389.08|0.10|0.01|N|O|1997-07-19|1997-08-29|1997-08-17|COLLECT COD|TRUCK|uests cajole furio|
+2950|53|4|3|14|13342.70|0.01|0.02|N|O|1997-07-29|1997-08-05|1997-07-31|TAKE BACK RETURN|MAIL|ccounts haggle carefully according |
+2950|187|8|4|45|48923.10|0.08|0.00|N|O|1997-09-05|1997-09-23|1997-09-11|NONE|FOB|ides the b|
+2950|61|2|5|46|44208.76|0.02|0.05|N|O|1997-07-15|1997-09-30|1997-07-25|COLLECT COD|RAIL|to the regular accounts are slyly carefu|
+2950|174|5|6|27|29002.59|0.01|0.03|N|O|1997-10-01|1997-09-13|1997-10-08|NONE|TRUCK|are alongside of the carefully silent |
+2951|3|8|1|5|4515.00|0.03|0.03|N|O|1996-03-27|1996-04-16|1996-03-30|NONE|REG AIR|to beans wake ac|
+2951|136|2|2|24|24867.12|0.07|0.03|N|O|1996-03-24|1996-04-16|1996-04-08|NONE|SHIP| ironic multipliers. express, regular|
+2951|187|8|3|40|43487.20|0.02|0.07|N|O|1996-05-03|1996-04-20|1996-05-22|COLLECT COD|REG AIR|ial deposits wake fluffily about th|
+2951|73|3|4|21|20434.47|0.06|0.08|N|O|1996-04-12|1996-04-27|1996-04-14|DELIVER IN PERSON|REG AIR|nt instructions toward the f|
+2951|51|6|5|15|14265.75|0.07|0.00|N|O|1996-03-25|1996-04-23|1996-03-27|COLLECT COD|REG AIR|inal account|
+2951|138|4|6|18|18686.34|0.06|0.00|N|O|1996-04-04|1996-04-27|1996-04-06|COLLECT COD|FOB|ep about the final, even package|
+2976|9|4|1|32|29088.00|0.06|0.00|A|F|1994-01-26|1994-02-13|1994-02-10|NONE|MAIL|nding, ironic deposits sleep f|
+2976|4|5|2|24|21696.00|0.00|0.03|A|F|1994-03-19|1994-01-26|1994-04-18|COLLECT COD|TRUCK|ronic pinto beans. slyly bol|
+2976|10|5|3|35|31850.35|0.10|0.07|R|F|1993-12-19|1994-02-14|1994-01-11|NONE|RAIL|boost slyly about the regular, regular re|
+2976|82|3|4|22|21605.76|0.00|0.04|A|F|1994-02-08|1994-03-03|1994-02-12|TAKE BACK RETURN|FOB|ncies kindle furiously. carefull|
+2976|134|5|5|13|13443.69|0.00|0.06|A|F|1994-02-06|1994-02-02|1994-02-19|NONE|FOB| furiously final courts boost |
+2976|109|2|6|30|30273.00|0.08|0.03|R|F|1994-03-27|1994-02-01|1994-04-26|TAKE BACK RETURN|RAIL|c ideas! unusual|
+2977|70|5|1|25|24251.75|0.03|0.07|N|O|1996-09-21|1996-10-06|1996-10-13|TAKE BACK RETURN|RAIL|furiously pe|
+2978|90|1|1|29|28712.61|0.00|0.08|A|F|1995-06-03|1995-07-25|1995-06-06|NONE|SHIP|ecial ideas promise slyly|
+2978|127|2|2|42|43139.04|0.01|0.06|N|O|1995-08-19|1995-07-18|1995-09-07|DELIVER IN PERSON|MAIL|ial requests nag blithely alongside of th|
+2978|43|2|3|26|24519.04|0.07|0.05|N|O|1995-07-29|1995-07-22|1995-08-20|COLLECT COD|REG AIR|as haggle against the carefully express dep|
+2978|28|1|4|7|6496.14|0.00|0.00|N|O|1995-07-18|1995-07-03|1995-07-23|NONE|FOB|. final ideas are blithe|
+2978|29|2|5|33|30657.66|0.09|0.03|R|F|1995-05-06|1995-07-23|1995-05-16|COLLECT COD|FOB|s. blithely unusual pack|
+2978|168|7|6|4|4272.64|0.08|0.04|N|O|1995-07-06|1995-07-31|1995-07-19|COLLECT COD|AIR|ffily unusual |
+2979|9|6|1|8|7272.00|0.00|0.08|N|O|1996-06-18|1996-05-21|1996-07-06|COLLECT COD|REG AIR|st blithely; blithely regular gifts dazz|
+2979|11|2|2|47|42817.47|0.05|0.00|N|O|1996-03-25|1996-05-13|1996-04-04|TAKE BACK RETURN|SHIP|iously unusual dependencies wake across|
+2979|188|9|3|35|38086.30|0.04|0.03|N|O|1996-05-25|1996-06-11|1996-06-24|DELIVER IN PERSON|MAIL|old ideas beneath the blit|
+2979|165|4|4|28|29824.48|0.05|0.08|N|O|1996-06-04|1996-04-23|1996-06-24|DELIVER IN PERSON|FOB|ing, regular pinto beans. blithel|
+2980|37|3|1|2|1874.06|0.09|0.03|N|O|1996-11-18|1996-10-22|1996-11-27|TAKE BACK RETURN|SHIP|enly across the special, pending packag|
+2980|10|7|2|48|43680.48|0.04|0.05|N|O|1996-09-25|1996-12-09|1996-10-12|NONE|REG AIR|totes. regular pinto |
+2980|133|9|3|27|27894.51|0.08|0.08|N|O|1996-12-08|1996-12-03|1996-12-14|NONE|REG AIR| theodolites cajole blithely sl|
+2980|25|10|4|49|45325.98|0.03|0.02|N|O|1996-10-04|1996-12-04|1996-10-06|NONE|RAIL|hy packages sleep quic|
+2980|187|8|5|24|26092.32|0.05|0.04|N|O|1997-01-12|1996-10-27|1997-01-14|NONE|MAIL|elets. fluffily regular in|
+2980|109|4|6|43|43391.30|0.01|0.01|N|O|1996-12-07|1996-11-10|1997-01-02|COLLECT COD|AIR|sts. slyly regu|
+2981|14|4|1|17|15538.17|0.03|0.05|N|O|1998-10-17|1998-10-02|1998-10-21|DELIVER IN PERSON|RAIL|, unusual packages x-ray. furious|
+2981|176|4|2|8|8609.36|0.06|0.03|N|O|1998-08-21|1998-09-28|1998-09-05|DELIVER IN PERSON|MAIL|ng to the f|
+2981|37|3|3|14|13118.42|0.03|0.07|N|O|1998-08-30|1998-10-04|1998-09-04|DELIVER IN PERSON|MAIL|kages detect furiously express requests.|
+2982|112|6|1|21|21254.31|0.00|0.01|A|F|1995-04-03|1995-06-08|1995-04-18|DELIVER IN PERSON|AIR|ironic deposits. furiously ex|
+2982|99|2|2|13|12988.17|0.02|0.08|R|F|1995-03-31|1995-05-07|1995-04-18|TAKE BACK RETURN|RAIL|regular deposits unwind alongside |
+2982|70|5|3|21|20371.47|0.01|0.01|R|F|1995-04-19|1995-06-03|1995-04-28|COLLECT COD|SHIP|egular ideas use furiously? bl|
+2983|163|4|1|44|46779.04|0.03|0.06|R|F|1992-02-09|1992-03-07|1992-03-09|TAKE BACK RETURN|AIR|ly regular instruct|
+2983|49|8|2|11|10439.44|0.09|0.06|A|F|1992-04-29|1992-02-27|1992-05-26|NONE|MAIL|aids integrate s|
+3008|132|3|1|8|8257.04|0.10|0.04|N|O|1995-12-06|1996-01-12|1995-12-22|TAKE BACK RETURN|FOB|yly ironic foxes. regular requests h|
+3008|200|3|2|31|34106.20|0.05|0.06|N|O|1995-12-14|1995-12-11|1995-12-31|TAKE BACK RETURN|AIR| bold packages. quic|
+3008|24|5|3|40|36960.80|0.01|0.03|N|O|1995-12-18|1996-01-06|1996-01-11|COLLECT COD|AIR|esias. theodolites detect blithely |
+3008|60|1|4|48|46082.88|0.07|0.06|N|O|1996-01-23|1996-01-07|1996-02-09|COLLECT COD|SHIP|ld theodolites. fluffily bold theodolit|
+3008|105|10|5|31|31158.10|0.03|0.02|N|O|1995-12-01|1996-01-20|1995-12-28|COLLECT COD|RAIL|nts use thinly around the carefully iro|
+3009|45|8|1|48|45361.92|0.10|0.02|N|O|1997-03-19|1997-05-13|1997-04-11|TAKE BACK RETURN|TRUCK| dependencies sleep quickly a|
+3009|185|6|2|38|41236.84|0.00|0.01|N|O|1997-05-01|1997-04-10|1997-05-17|TAKE BACK RETURN|AIR|nal packages should haggle slyly. quickl|
+3009|130|3|3|26|26783.38|0.08|0.02|N|O|1997-05-15|1997-05-10|1997-06-13|TAKE BACK RETURN|SHIP|uriously specia|
+3010|138|4|1|23|23876.99|0.04|0.00|N|O|1996-03-08|1996-02-29|1996-03-27|NONE|TRUCK|ounts. pendin|
+3010|174|4|2|22|23631.74|0.09|0.06|N|O|1996-03-06|1996-04-06|1996-03-18|COLLECT COD|REG AIR| final deposit|
+3010|58|6|3|24|22993.20|0.04|0.07|N|O|1996-05-09|1996-03-14|1996-05-15|DELIVER IN PERSON|RAIL|ar, even reques|
+3010|24|7|4|28|25872.56|0.09|0.06|N|O|1996-03-05|1996-03-28|1996-04-03|DELIVER IN PERSON|FOB|ake carefully carefully even request|
+3010|104|5|5|9|9036.90|0.02|0.02|N|O|1996-04-28|1996-03-17|1996-05-18|NONE|SHIP|inal packages. quickly even pinto|
+3010|92|3|6|38|37699.42|0.05|0.07|N|O|1996-04-15|1996-03-16|1996-04-21|DELIVER IN PERSON|RAIL|accounts ar|
+3011|198|10|1|5|5490.95|0.02|0.04|R|F|1992-04-21|1992-02-23|1992-05-15|NONE|TRUCK|nusual sentiments. carefully bold idea|
+3011|123|4|2|42|42971.04|0.05|0.00|A|F|1992-02-01|1992-03-18|1992-02-29|NONE|TRUCK|osits haggle quickly pending, |
+3012|195|7|1|49|53664.31|0.00|0.00|A|F|1993-08-07|1993-07-01|1993-08-08|NONE|MAIL| quickly furious packages. silently unusua|
+3012|161|2|2|37|39262.92|0.06|0.03|A|F|1993-08-16|1993-06-07|1993-08-24|TAKE BACK RETURN|REG AIR|uickly permanent packages sleep caref|
+3013|94|6|1|31|30816.79|0.08|0.08|N|O|1997-05-03|1997-04-05|1997-05-25|NONE|AIR|y furious depen|
+3013|139|5|2|30|31173.90|0.05|0.06|N|O|1997-05-02|1997-03-09|1997-05-12|TAKE BACK RETURN|MAIL|ronic packages. slyly even|
+3013|120|10|3|35|35704.20|0.00|0.03|N|O|1997-04-02|1997-05-04|1997-04-16|COLLECT COD|MAIL|ely accord|
+3013|181|2|4|17|18380.06|0.01|0.07|N|O|1997-02-26|1997-05-02|1997-03-27|DELIVER IN PERSON|SHIP|fully unusual account|
+3013|60|5|5|20|19201.20|0.00|0.04|N|O|1997-05-06|1997-03-18|1997-05-12|COLLECT COD|RAIL|unts boost regular ideas. slyly pe|
+3013|72|2|6|19|18469.33|0.08|0.07|N|O|1997-05-11|1997-04-18|1997-05-15|COLLECT COD|REG AIR|fluffily pending packages nag furiously al|
+3014|163|4|1|36|38273.76|0.05|0.03|A|F|1992-11-16|1993-01-20|1992-11-28|TAKE BACK RETURN|FOB|ding accounts boost fu|
+3014|106|1|2|36|36219.60|0.00|0.08|R|F|1992-12-28|1992-12-29|1993-01-24|COLLECT COD|MAIL|iously ironic r|
+3014|151|9|3|48|50455.20|0.06|0.02|A|F|1992-12-19|1993-01-08|1992-12-25|DELIVER IN PERSON|REG AIR|y pending theodolites wake. reg|
+3014|114|1|4|14|14197.54|0.10|0.02|R|F|1992-11-19|1993-01-01|1992-12-17|DELIVER IN PERSON|SHIP|. slyly brave platelets nag. careful,|
+3014|75|5|5|28|27301.96|0.02|0.08|R|F|1993-01-09|1992-12-18|1993-01-10|TAKE BACK RETURN|FOB|es are. final braids nag slyly. fluff|
+3014|38|4|6|30|28140.90|0.04|0.01|R|F|1993-02-28|1993-01-02|1993-03-20|TAKE BACK RETURN|AIR| final foxes.|
+3015|3|8|1|5|4515.00|0.09|0.00|A|F|1993-01-10|1992-12-02|1993-01-19|TAKE BACK RETURN|RAIL| the furiously pendi|
+3015|18|2|2|17|15606.17|0.03|0.01|R|F|1992-10-16|1992-11-20|1992-10-28|COLLECT COD|AIR|s above the fluffily final t|
+3015|91|4|3|23|22795.07|0.03|0.05|A|F|1992-12-03|1992-11-19|1992-12-23|DELIVER IN PERSON|FOB|s are slyly carefully special pinto bea|
+3015|156|7|4|7|7393.05|0.10|0.03|A|F|1992-12-07|1992-12-17|1992-12-30|DELIVER IN PERSON|REG AIR| after the evenly special packages ca|
+3015|165|4|5|42|44736.72|0.04|0.02|R|F|1993-01-21|1992-11-07|1993-02-11|DELIVER IN PERSON|AIR|encies haggle furious|
+3015|66|7|6|18|17389.08|0.02|0.03|R|F|1992-10-10|1992-11-19|1992-10-18|TAKE BACK RETURN|MAIL|equests wake fluffil|
+3040|16|6|1|18|16488.18|0.08|0.04|R|F|1993-06-25|1993-07-06|1993-07-19|TAKE BACK RETURN|SHIP|ly thin accou|
+3040|133|9|2|9|9298.17|0.00|0.01|A|F|1993-06-12|1993-05-16|1993-06-14|NONE|RAIL|ges. pending packages wake. requests|
+3040|126|5|3|30|30783.60|0.01|0.01|A|F|1993-08-06|1993-05-18|1993-08-19|NONE|MAIL|x furiously bold packages. expres|
+3040|83|4|4|14|13763.12|0.05|0.04|A|F|1993-05-13|1993-05-18|1993-05-19|TAKE BACK RETURN|REG AIR| haggle carefully. express hocke|
+3040|52|3|5|43|40938.15|0.04|0.04|R|F|1993-05-21|1993-05-25|1993-05-26|NONE|MAIL|sts nag slyly alongside of the depos|
+3040|18|5|6|10|9180.10|0.08|0.04|R|F|1993-05-16|1993-06-24|1993-06-11|DELIVER IN PERSON|MAIL|ely regular foxes haggle dari|
+3041|181|2|1|5|5405.90|0.07|0.04|N|O|1997-07-20|1997-07-15|1997-08-17|COLLECT COD|FOB|posits dazzle special p|
+3041|146|9|2|9|9415.26|0.03|0.03|N|O|1997-06-29|1997-08-14|1997-07-19|COLLECT COD|AIR|iously across the silent pinto beans. furi|
+3041|68|5|3|9|8712.54|0.09|0.06|N|O|1997-08-28|1997-07-23|1997-09-16|TAKE BACK RETURN|FOB|scapades after the special|
+3042|105|2|1|30|30153.00|0.08|0.06|A|F|1995-01-12|1995-02-15|1995-01-24|DELIVER IN PERSON|SHIP|the requests detect fu|
+3042|102|3|2|28|28058.80|0.05|0.03|A|F|1994-11-24|1995-01-02|1994-12-06|TAKE BACK RETURN|MAIL|ng the furiously r|
+3042|14|8|3|34|31076.34|0.04|0.00|R|F|1994-12-11|1995-02-03|1994-12-21|TAKE BACK RETURN|TRUCK|can wake after the enticingly stealthy i|
+3042|48|1|4|19|18012.76|0.02|0.01|A|F|1995-03-05|1995-01-24|1995-03-17|COLLECT COD|TRUCK|e carefully. regul|
+3043|46|9|1|23|21758.92|0.07|0.04|R|F|1992-05-08|1992-07-22|1992-05-18|COLLECT COD|TRUCK|uickly above the pending,|
+3043|6|3|2|15|13590.00|0.03|0.05|A|F|1992-05-27|1992-06-03|1992-06-09|COLLECT COD|FOB|usly furiously|
+3043|60|1|3|42|40322.52|0.10|0.07|R|F|1992-07-15|1992-06-19|1992-07-23|NONE|MAIL|ide of the un|
+3043|91|2|4|5|4955.45|0.10|0.01|A|F|1992-05-22|1992-07-02|1992-06-20|TAKE BACK RETURN|TRUCK|ake blithely re|
+3044|101|2|1|10|10011.00|0.07|0.08|N|O|1996-07-13|1996-05-06|1996-07-21|TAKE BACK RETURN|REG AIR| slyly ironic requests. s|
+3044|168|7|2|3|3204.48|0.06|0.02|N|O|1996-07-27|1996-05-26|1996-08-15|TAKE BACK RETURN|AIR|ecoys haggle furiously pending requests.|
+3044|19|3|3|47|43193.47|0.09|0.00|N|O|1996-05-24|1996-06-22|1996-05-30|NONE|REG AIR|ly around the car|
+3045|88|9|1|41|40511.28|0.05|0.01|N|O|1995-09-30|1995-11-24|1995-10-03|TAKE BACK RETURN|MAIL|ely final foxes. carefully ironic pinto b|
+3045|69|6|2|48|46514.88|0.02|0.03|N|O|1995-10-01|1995-12-16|1995-10-10|TAKE BACK RETURN|MAIL|ole quickly outside th|
+3046|74|5|1|44|42859.08|0.03|0.03|N|O|1996-03-03|1996-02-25|1996-04-01|NONE|AIR| are quickly. blithe|
+3046|54|5|2|46|43886.30|0.03|0.08|N|O|1996-03-22|1996-02-28|1996-04-07|TAKE BACK RETURN|AIR|sits sleep furious|
+3046|2|9|3|31|27962.00|0.03|0.07|N|O|1996-03-24|1996-01-30|1996-03-26|NONE|RAIL|y pending somas alongside of the slyly iro|
+3047|104|5|1|17|17069.70|0.08|0.02|N|O|1997-06-14|1997-04-20|1997-06-23|COLLECT COD|FOB|onic instruction|
+3047|14|1|2|23|21022.23|0.00|0.04|N|O|1997-05-20|1997-06-14|1997-05-28|TAKE BACK RETURN|REG AIR| slyly ironi|
+3072|57|9|1|6|5742.30|0.09|0.05|R|F|1994-02-09|1994-03-24|1994-02-28|DELIVER IN PERSON|REG AIR|gular requests abov|
+3072|108|3|2|36|36291.60|0.07|0.02|R|F|1994-04-14|1994-04-22|1994-05-06|COLLECT COD|AIR| theodolites. blithely e|
+3072|97|8|3|7|6979.63|0.04|0.07|R|F|1994-05-09|1994-03-31|1994-05-19|COLLECT COD|TRUCK|uests. ironic, ironic depos|
+3072|83|4|4|39|38340.12|0.05|0.08|A|F|1994-05-27|1994-04-20|1994-06-14|COLLECT COD|MAIL|es; slyly spe|
+3072|88|9|5|1|988.08|0.01|0.08|R|F|1994-02-26|1994-03-14|1994-03-19|NONE|AIR| slyly ironic attainments. car|
+3073|194|7|1|16|17507.04|0.07|0.01|R|F|1994-03-02|1994-03-23|1994-03-31|DELIVER IN PERSON|AIR|n requests. ironi|
+3073|22|5|2|47|43334.94|0.09|0.00|R|F|1994-03-26|1994-02-12|1994-04-21|NONE|REG AIR|eposits. fluffily|
+3073|87|8|3|10|9870.80|0.03|0.00|R|F|1994-02-11|1994-03-24|1994-02-26|COLLECT COD|FOB| furiously caref|
+3073|29|4|4|14|13006.28|0.09|0.07|R|F|1994-03-24|1994-04-01|1994-04-07|NONE|RAIL|ilently quiet epitaphs.|
+3073|41|10|5|25|23526.00|0.00|0.07|R|F|1994-04-14|1994-03-07|1994-04-22|NONE|TRUCK|nag asymptotes. pinto beans sleep |
+3073|147|8|6|39|40838.46|0.09|0.02|R|F|1994-05-01|1994-02-16|1994-05-12|DELIVER IN PERSON|AIR|lar excuses across the furiously even |
+3073|44|5|7|11|10384.44|0.08|0.07|A|F|1994-05-01|1994-03-06|1994-05-08|COLLECT COD|SHIP|instructions sleep according to the |
+3074|37|8|1|50|46851.50|0.08|0.08|A|F|1993-01-31|1992-12-15|1993-02-20|NONE|AIR|furiously pending requests haggle s|
+3074|139|5|2|39|40526.07|0.03|0.00|R|F|1992-12-08|1993-01-28|1992-12-09|DELIVER IN PERSON|TRUCK|iously throu|
+3075|9|6|1|39|35451.00|0.02|0.03|A|F|1994-06-10|1994-06-21|1994-06-20|NONE|FOB|ing deposits nag |
+3075|52|10|2|2|1904.10|0.07|0.08|R|F|1994-06-14|1994-06-10|1994-06-25|TAKE BACK RETURN|AIR|. unusual, unusual accounts haggle furious|
+3076|85|6|1|44|43343.52|0.00|0.05|A|F|1993-09-14|1993-10-04|1993-09-17|TAKE BACK RETURN|FOB| instructions h|
+3076|106|1|2|22|22134.20|0.08|0.00|A|F|1993-09-05|1993-09-10|1993-09-27|NONE|REG AIR|packages wake furiou|
+3076|5|8|3|31|28055.00|0.06|0.06|A|F|1993-08-10|1993-09-17|1993-08-17|TAKE BACK RETURN|SHIP|regular depos|
+3077|72|2|1|25|24301.75|0.06|0.01|N|O|1997-09-14|1997-10-16|1997-10-06|NONE|TRUCK|lent account|
+3077|91|3|2|40|39643.60|0.05|0.06|N|O|1997-10-22|1997-09-19|1997-11-19|DELIVER IN PERSON|AIR|to the enticing packag|
+3077|78|7|3|13|12714.91|0.03|0.07|N|O|1997-09-09|1997-10-15|1997-09-19|NONE|TRUCK|luffily close depende|
+3077|115|5|4|23|23347.53|0.03|0.02|N|O|1997-11-05|1997-09-16|1997-11-20|NONE|MAIL|lly. fluffily pending dinos across|
+3078|132|3|1|25|25803.25|0.01|0.03|A|F|1993-04-22|1993-05-01|1993-04-28|TAKE BACK RETURN|AIR|express dinos. carefully ironic|
+3078|78|8|2|21|20539.47|0.09|0.07|A|F|1993-03-20|1993-03-21|1993-04-01|COLLECT COD|AIR|e fluffily. |
+3079|70|5|1|20|19401.40|0.05|0.00|N|O|1997-10-18|1997-10-26|1997-11-14|NONE|RAIL|ets are according to the quickly dari|
+3079|117|1|2|38|38650.18|0.08|0.07|N|O|1997-11-07|1997-11-25|1997-12-06|NONE|RAIL|e carefully regular realms|
+3079|17|8|3|40|36680.40|0.02|0.08|N|O|1997-09-26|1997-12-11|1997-10-09|NONE|RAIL|ide of the pending, special deposi|
+3079|24|5|4|2|1848.04|0.00|0.08|N|O|1998-01-05|1997-11-17|1998-01-28|NONE|FOB|ly busy requests believ|
+3079|188|9|5|2|2176.36|0.10|0.00|N|O|1997-12-27|1997-10-25|1998-01-08|COLLECT COD|SHIP|y regular asymptotes doz|
+3079|166|1|6|46|49043.36|0.00|0.00|N|O|1997-11-19|1997-11-04|1997-11-25|DELIVER IN PERSON|REG AIR|es. final, regula|
+3104|51|6|1|20|19021.00|0.01|0.08|A|F|1993-12-31|1993-11-24|1994-01-12|DELIVER IN PERSON|REG AIR|s are. furiously s|
+3104|48|1|2|47|44557.88|0.02|0.05|A|F|1993-12-25|1993-11-02|1994-01-12|COLLECT COD|RAIL|ily daring acc|
+3104|63|4|3|11|10593.66|0.02|0.03|A|F|1993-10-05|1993-11-30|1993-10-27|NONE|TRUCK| special deposits u|
+3104|38|9|4|26|24388.78|0.02|0.08|R|F|1994-01-02|1993-12-05|1994-01-31|TAKE BACK RETURN|TRUCK|es boost carefully. slyly |
+3105|184|5|1|11|11925.98|0.01|0.06|N|O|1997-02-07|1997-02-09|1997-03-01|NONE|FOB|kly bold depths caj|
+3105|45|6|2|9|8505.36|0.08|0.08|N|O|1996-12-25|1997-02-04|1997-01-09|COLLECT COD|SHIP|es wake among t|
+3105|25|4|3|48|44400.96|0.02|0.05|N|O|1997-02-28|1997-01-31|1997-03-18|DELIVER IN PERSON|REG AIR|ending platelets wake carefully ironic inst|
+3105|91|5|4|23|22795.07|0.04|0.07|N|O|1997-03-08|1996-12-14|1997-03-18|COLLECT COD|REG AIR| detect slyly. blithely unusual requests ar|
+3105|90|1|5|8|7920.72|0.07|0.07|N|O|1996-12-28|1996-12-28|1997-01-25|NONE|FOB|s. blithely unusual ideas was after|
+3105|47|6|6|30|28411.20|0.08|0.05|N|O|1997-03-03|1997-02-03|1997-03-05|NONE|FOB|ess accounts boost among t|
+3106|86|7|1|22|21693.76|0.03|0.02|N|O|1997-02-28|1997-02-12|1997-03-03|DELIVER IN PERSON|FOB|structions atop the blithely|
+3106|136|2|2|49|50770.37|0.06|0.06|N|O|1997-02-27|1997-03-11|1997-03-12|NONE|TRUCK|lets. quietly regular courts |
+3106|52|7|3|42|39986.10|0.09|0.07|N|O|1997-04-05|1997-03-17|1997-04-22|COLLECT COD|REG AIR|nstructions wake. furiously |
+3106|196|10|4|6|6577.14|0.10|0.07|N|O|1997-02-02|1997-04-11|1997-02-27|COLLECT COD|REG AIR|symptotes. slyly bold platelets cajol|
+3106|65|2|5|16|15440.96|0.09|0.08|N|O|1997-02-25|1997-04-10|1997-03-16|NONE|AIR|sits wake slyl|
+3107|149|6|1|16|16786.24|0.05|0.04|N|O|1997-08-30|1997-10-20|1997-09-20|TAKE BACK RETURN|REG AIR|regular pinto beans. ironic ideas haggle|
+3107|142|3|2|35|36474.90|0.05|0.06|N|O|1997-08-27|1997-11-19|1997-09-14|COLLECT COD|TRUCK|ets doubt furiously final ideas. final|
+3107|170|9|3|23|24613.91|0.03|0.06|N|O|1997-12-10|1997-11-11|1997-12-14|TAKE BACK RETURN|SHIP|atelets must ha|
+3107|87|8|4|27|26651.16|0.00|0.08|N|O|1997-11-15|1997-10-31|1997-11-28|DELIVER IN PERSON|FOB|furiously final |
+3108|109|2|1|37|37336.70|0.06|0.04|A|F|1993-10-16|1993-10-01|1993-11-09|DELIVER IN PERSON|RAIL| final requests. |
+3108|166|1|2|26|27720.16|0.08|0.05|A|F|1993-11-12|1993-10-05|1993-12-09|COLLECT COD|TRUCK| slyly slow foxes wake furious|
+3109|18|2|1|32|29376.32|0.08|0.03|A|F|1993-09-05|1993-10-06|1993-09-18|DELIVER IN PERSON|FOB|ecial orbits are furiou|
+3109|145|4|2|49|51211.86|0.08|0.06|R|F|1993-10-24|1993-09-30|1993-11-21|TAKE BACK RETURN|AIR| even pearls. furiously pending |
+3109|176|4|3|43|46275.31|0.04|0.07|R|F|1993-09-29|1993-09-06|1993-10-13|COLLECT COD|MAIL|ding to the foxes. |
+3109|79|10|4|26|25455.82|0.01|0.05|R|F|1993-11-16|1993-10-18|1993-12-06|TAKE BACK RETURN|TRUCK| sleep slyly according to t|
+3109|143|2|5|50|52157.00|0.01|0.08|A|F|1993-09-17|1993-10-16|1993-10-11|NONE|FOB| regular packages boost blithely even, re|
+3109|15|9|6|10|9150.10|0.10|0.04|A|F|1993-10-26|1993-10-03|1993-11-09|NONE|TRUCK|sits haggle carefully. regular, unusual ac|
+3110|89|10|1|1|989.08|0.02|0.07|A|F|1995-01-15|1995-01-20|1995-01-30|DELIVER IN PERSON|REG AIR|c theodolites a|
+3110|57|2|2|31|29668.55|0.01|0.06|R|F|1995-03-31|1995-03-07|1995-04-21|TAKE BACK RETURN|REG AIR|en deposits. ironic|
+3110|3|10|3|34|30702.00|0.02|0.02|A|F|1995-02-23|1995-01-27|1995-03-09|TAKE BACK RETURN|FOB|ly pending requests ha|
+3110|40|1|4|16|15040.64|0.04|0.04|A|F|1995-01-10|1995-02-06|1995-01-26|NONE|MAIL|across the regular acco|
+3110|140|6|5|39|40565.46|0.09|0.01|A|F|1995-02-09|1995-01-21|1995-02-21|NONE|MAIL|side of the blithely unusual courts. slyly |
+3111|137|8|1|22|22816.86|0.06|0.05|N|O|1995-09-21|1995-11-09|1995-10-17|COLLECT COD|REG AIR|quests. regular dolphins against the |
+3111|58|10|2|30|28741.50|0.06|0.05|N|O|1995-10-05|1995-11-15|1995-11-01|TAKE BACK RETURN|TRUCK|eas are furiously slyly special deposits.|
+3111|52|3|3|10|9520.50|0.02|0.02|N|O|1995-11-10|1995-11-02|1995-12-04|NONE|FOB|ng the slyly ironic inst|
+3111|132|3|4|31|31996.03|0.00|0.08|N|O|1995-10-26|1995-09-26|1995-11-02|TAKE BACK RETURN|MAIL|kages detect express attainments|
+3111|54|6|5|14|13356.70|0.05|0.04|N|O|1995-10-17|1995-10-19|1995-10-19|TAKE BACK RETURN|SHIP|re. pinto |
+3111|86|7|6|5|4930.40|0.03|0.08|N|O|1995-08-30|1995-10-16|1995-09-04|DELIVER IN PERSON|TRUCK|. carefully even ideas|
+3111|148|9|7|41|42973.74|0.09|0.05|N|O|1995-11-22|1995-11-01|1995-12-01|TAKE BACK RETURN|FOB|fily slow ideas. |
+3136|142|5|1|30|31264.20|0.02|0.08|R|F|1994-08-13|1994-10-02|1994-09-02|TAKE BACK RETURN|RAIL|leep blithel|
+3136|103|4|2|7|7021.70|0.05|0.07|A|F|1994-10-08|1994-09-14|1994-10-11|TAKE BACK RETURN|SHIP|ic pinto beans are slyly. f|
+3136|158|3|3|43|45500.45|0.00|0.07|A|F|1994-09-05|1994-09-25|1994-09-11|NONE|RAIL|. special theodolites ha|
+3136|116|6|4|26|26418.86|0.04|0.05|A|F|1994-10-13|1994-11-07|1994-11-05|TAKE BACK RETURN|AIR|eep fluffily. daringly silent attainments d|
+3136|67|8|5|2|1934.12|0.08|0.07|R|F|1994-11-21|1994-11-03|1994-11-26|DELIVER IN PERSON|TRUCK|? special, silent |
+3136|80|1|6|29|28422.32|0.08|0.07|A|F|1994-11-16|1994-10-03|1994-12-14|NONE|FOB|latelets. final |
+3137|3|4|1|6|5418.00|0.02|0.02|N|O|1995-09-19|1995-10-23|1995-10-16|NONE|SHIP|ly express as|
+3137|6|3|2|4|3624.00|0.06|0.04|N|O|1995-10-01|1995-09-11|1995-10-30|COLLECT COD|RAIL|posits wake. silent excuses boost about|
+3138|93|5|1|7|6951.63|0.05|0.05|R|F|1994-03-04|1994-03-14|1994-03-20|NONE|AIR|lithely quickly even packages. packages|
+3138|44|5|2|27|25489.08|0.09|0.01|R|F|1994-03-24|1994-03-23|1994-04-18|DELIVER IN PERSON|FOB|counts cajole fluffily carefully special i|
+3138|197|8|3|32|35110.08|0.00|0.01|R|F|1994-02-24|1994-05-07|1994-02-28|TAKE BACK RETURN|MAIL|inal foxes affix slyly. fluffily regul|
+3138|172|3|4|38|40742.46|0.07|0.04|R|F|1994-02-21|1994-03-21|1994-03-13|COLLECT COD|FOB|lithely fluffily un|
+3138|10|1|5|12|10920.12|0.09|0.02|A|F|1994-03-04|1994-04-11|1994-03-21|COLLECT COD|FOB|. bold pinto beans haggl|
+3138|44|7|6|25|23601.00|0.05|0.08|A|F|1994-05-19|1994-04-07|1994-06-17|TAKE BACK RETURN|AIR|dolites around the carefully busy the|
+3139|40|6|1|46|43241.84|0.08|0.03|R|F|1992-04-28|1992-03-04|1992-05-19|TAKE BACK RETURN|FOB|of the unusual, unusual re|
+3140|7|4|1|21|19047.00|0.08|0.02|R|F|1992-04-12|1992-05-31|1992-04-21|NONE|REG AIR| furiously sly excuses according to the|
+3140|89|10|2|10|9890.80|0.07|0.01|A|F|1992-05-30|1992-05-09|1992-06-09|COLLECT COD|RAIL|accounts. expres|
+3140|133|4|3|28|28927.64|0.06|0.00|R|F|1992-06-08|1992-07-07|1992-07-08|TAKE BACK RETURN|SHIP|lar ideas. slyly ironic d|
+3141|177|6|1|32|34469.44|0.06|0.00|N|O|1995-11-21|1995-12-18|1995-11-26|DELIVER IN PERSON|FOB|oxes are quickly about t|
+3141|10|7|2|37|33670.37|0.10|0.05|N|O|1996-01-24|1995-12-16|1996-01-27|DELIVER IN PERSON|AIR|press pinto beans. bold accounts boost b|
+3141|79|7|3|9|8811.63|0.09|0.02|N|O|1995-11-11|1995-12-10|1995-12-02|DELIVER IN PERSON|MAIL|uickly ironic, pendi|
+3141|46|9|4|47|44463.88|0.03|0.01|N|O|1995-11-29|1996-01-13|1995-12-10|TAKE BACK RETURN|TRUCK| are slyly pi|
+3142|120|7|1|15|15301.80|0.03|0.08|R|F|1992-08-15|1992-08-18|1992-08-22|DELIVER IN PERSON|AIR|instructions are. ironic packages doz|
+3143|90|1|1|22|21781.98|0.02|0.00|A|F|1993-05-11|1993-03-26|1993-05-20|TAKE BACK RETURN|MAIL|l, special instructions nag |
+3143|183|4|2|40|43327.20|0.03|0.08|A|F|1993-05-07|1993-03-29|1993-05-17|COLLECT COD|FOB|sly unusual theodolites. slyly ev|
+3143|183|4|3|22|23829.96|0.05|0.03|A|F|1993-03-18|1993-05-09|1993-04-14|DELIVER IN PERSON|MAIL|beans. fluf|
+3143|66|7|4|46|44438.76|0.05|0.08|R|F|1993-04-19|1993-03-21|1993-05-05|COLLECT COD|REG AIR|low forges haggle. even packages use bli|
+3168|60|8|1|46|44162.76|0.08|0.08|R|F|1992-02-14|1992-03-02|1992-03-02|TAKE BACK RETURN|SHIP|y across the express accounts. fluff|
+3168|154|5|2|1|1054.15|0.06|0.08|A|F|1992-05-27|1992-03-12|1992-06-09|TAKE BACK RETURN|SHIP|pinto beans. slyly regular courts haggle |
+3168|128|3|3|13|13365.56|0.09|0.02|A|F|1992-03-05|1992-04-29|1992-03-15|NONE|SHIP|ironic somas haggle quick|
+3168|165|10|4|11|11716.76|0.02|0.05|R|F|1992-04-12|1992-03-17|1992-05-12|COLLECT COD|SHIP|ously furious dependenc|
+3169|192|4|1|12|13106.28|0.01|0.04|R|F|1994-01-05|1994-03-18|1994-01-21|COLLECT COD|REG AIR| regular d|
+3169|200|3|2|17|18703.40|0.05|0.04|R|F|1994-03-02|1994-01-21|1994-03-03|DELIVER IN PERSON|TRUCK|usly regular packages. ironi|
+3169|188|9|3|12|13058.16|0.08|0.07|A|F|1994-04-18|1994-03-12|1994-05-08|TAKE BACK RETURN|FOB|atelets. pac|
+3169|105|6|4|26|26132.60|0.10|0.04|R|F|1994-04-08|1994-03-21|1994-04-29|NONE|TRUCK|ter the regular ideas. slyly iro|
+3169|108|9|5|6|6048.60|0.09|0.01|A|F|1994-03-24|1994-02-22|1994-04-04|TAKE BACK RETURN|AIR|ular instructions. ca|
+3169|177|8|6|46|49549.82|0.02|0.07|A|F|1994-02-01|1994-01-22|1994-02-24|DELIVER IN PERSON|RAIL|thely bold theodolites are fl|
+3170|40|6|1|12|11280.48|0.03|0.03|N|O|1998-02-12|1998-01-17|1998-02-24|NONE|TRUCK|ing accounts along the speci|
+3170|100|2|2|21|21002.10|0.01|0.00|N|O|1997-12-09|1998-01-31|1997-12-21|DELIVER IN PERSON|MAIL|o beans. carefully final requests dou|
+3170|89|10|3|27|26705.16|0.00|0.05|N|O|1998-02-25|1998-01-29|1998-02-27|COLLECT COD|AIR|efully bold foxes. regular, ev|
+3170|41|2|4|34|31995.36|0.05|0.04|N|O|1998-02-01|1998-01-11|1998-02-20|TAKE BACK RETURN|TRUCK|s about the fluffily final de|
+3170|90|1|5|32|31682.88|0.02|0.04|N|O|1997-11-24|1997-12-12|1997-12-15|COLLECT COD|SHIP|ggle about the furiously r|
+3170|110|5|6|43|43434.73|0.08|0.05|N|O|1998-01-05|1998-01-04|1998-01-14|NONE|REG AIR|. express dolphins use sly|
+3170|84|5|7|26|25586.08|0.10|0.05|N|O|1998-02-12|1997-12-22|1998-02-28|COLLECT COD|TRUCK|s engage furiously. |
+3171|47|4|1|34|32199.36|0.04|0.00|A|F|1993-05-30|1993-05-27|1993-06-06|DELIVER IN PERSON|REG AIR|r the final, even packages. quickly|
+3171|139|10|2|50|51956.50|0.01|0.04|A|F|1993-07-19|1993-05-15|1993-07-31|TAKE BACK RETURN|REG AIR|riously final foxes about the ca|
+3172|96|9|1|4|3984.36|0.06|0.07|A|F|1992-09-26|1992-08-15|1992-10-20|DELIVER IN PERSON|TRUCK|s are slyly thin package|
+3172|148|7|2|43|45070.02|0.05|0.07|R|F|1992-08-22|1992-07-07|1992-08-26|COLLECT COD|MAIL| final packages. |
+3172|132|3|3|13|13417.69|0.03|0.01|R|F|1992-07-06|1992-08-06|1992-08-05|DELIVER IN PERSON|MAIL|inal deposits haggle along the|
+3172|135|6|4|28|28983.64|0.08|0.04|R|F|1992-07-09|1992-07-14|1992-07-16|NONE|MAIL|regular ideas. packages are furi|
+3172|64|5|5|31|29885.86|0.05|0.08|A|F|1992-09-01|1992-08-27|1992-09-23|NONE|SHIP|. slyly regular dependencies haggle quiet|
+3173|195|6|1|35|38331.65|0.01|0.08|N|O|1996-09-09|1996-10-15|1996-10-04|TAKE BACK RETURN|RAIL| across the slyly even requests.|
+3173|178|7|2|5|5390.85|0.09|0.07|N|O|1996-12-06|1996-09-17|1996-12-07|DELIVER IN PERSON|REG AIR|express depo|
+3173|46|9|3|16|15136.64|0.06|0.01|N|O|1996-08-12|1996-09-21|1996-08-22|NONE|SHIP|e special,|
+3173|94|5|4|2|1988.18|0.00|0.00|N|O|1996-10-15|1996-11-06|1996-10-18|COLLECT COD|MAIL|ular pearls|
+3173|185|6|5|2|2170.36|0.00|0.06|N|O|1996-08-18|1996-09-21|1996-09-07|DELIVER IN PERSON|MAIL|fluffily above t|
+3174|186|7|1|6|6517.08|0.04|0.08|N|O|1996-03-13|1996-02-09|1996-03-22|DELIVER IN PERSON|AIR| furiously ironic|
+3174|194|7|2|4|4376.76|0.01|0.05|N|O|1995-11-17|1996-01-08|1995-11-27|DELIVER IN PERSON|RAIL|deas sleep thi|
+3174|92|4|3|21|20833.89|0.08|0.05|N|O|1996-02-20|1995-12-28|1996-03-17|NONE|MAIL|iously. idly bold theodolites a|
+3174|192|6|4|13|14198.47|0.08|0.06|N|O|1996-01-11|1996-01-26|1996-02-01|DELIVER IN PERSON|SHIP|leep quickly? slyly special platelets|
+3174|72|2|5|39|37910.73|0.02|0.06|N|O|1995-12-02|1996-02-08|1995-12-12|TAKE BACK RETURN|TRUCK| wake slyly foxes. bold requests p|
+3174|120|7|6|8|8160.96|0.07|0.08|N|O|1995-12-07|1996-01-08|1995-12-29|DELIVER IN PERSON|TRUCK|nic deposits among t|
+3175|120|10|1|28|28563.36|0.10|0.01|R|F|1994-09-27|1994-10-05|1994-10-04|NONE|FOB|ore the even, silent foxes. b|
+3175|1|4|2|38|34238.00|0.01|0.07|R|F|1994-10-10|1994-08-25|1994-10-28|NONE|MAIL|the quickly even dolph|
+3175|129|4|3|12|12349.44|0.09|0.07|R|F|1994-10-16|1994-09-15|1994-10-18|NONE|AIR|ter the pending deposits. slyly e|
+3175|85|6|4|14|13791.12|0.02|0.05|R|F|1994-10-21|1994-09-05|1994-11-15|NONE|MAIL|nt dependencies are quietly even |
+3175|18|8|5|47|43146.47|0.08|0.03|R|F|1994-08-08|1994-09-10|1994-08-21|COLLECT COD|REG AIR| final requests x-r|
+3175|175|6|6|44|47307.48|0.01|0.00|R|F|1994-09-26|1994-08-30|1994-10-24|TAKE BACK RETURN|MAIL|are carefully furiously ironic accounts. e|
+3175|1|4|7|32|28832.00|0.01|0.02|R|F|1994-09-29|1994-09-20|1994-10-10|TAKE BACK RETURN|SHIP|lites sleep|
+3200|116|6|1|17|17273.87|0.10|0.00|N|O|1996-06-06|1996-04-21|1996-06-14|DELIVER IN PERSON|AIR|side of the furiously pendin|
+3200|166|1|2|27|28786.32|0.03|0.00|N|O|1996-05-07|1996-05-01|1996-05-09|TAKE BACK RETURN|REG AIR|as haggle furiously against the fluff|
+3200|131|2|3|36|37120.68|0.01|0.01|N|O|1996-03-22|1996-03-19|1996-03-30|DELIVER IN PERSON|FOB|f the carefu|
+3200|30|9|4|11|10230.33|0.10|0.02|N|O|1996-03-18|1996-03-21|1996-04-14|COLLECT COD|RAIL|osits sleep fur|
+3200|198|9|5|16|17571.04|0.05|0.00|N|O|1996-02-28|1996-03-13|1996-03-11|NONE|RAIL|ly against the quiet packages. blith|
+3200|175|3|6|25|26879.25|0.10|0.01|N|O|1996-02-08|1996-04-11|1996-03-06|COLLECT COD|FOB| slyly regular hockey players! pinto beans |
+3201|46|7|1|11|10406.44|0.10|0.06|A|F|1993-09-27|1993-08-29|1993-10-18|NONE|TRUCK|ing to the furiously expr|
+3201|118|5|2|27|27488.97|0.08|0.02|R|F|1993-08-31|1993-08-24|1993-09-08|TAKE BACK RETURN|FOB|deposits are slyly along|
+3201|119|6|3|50|50955.50|0.00|0.08|R|F|1993-10-27|1993-09-30|1993-11-16|COLLECT COD|TRUCK| deposits. express, ir|
+3202|183|4|1|30|32495.40|0.09|0.02|A|F|1993-03-18|1993-03-10|1993-03-23|COLLECT COD|SHIP|ven platelets. furiously final|
+3202|20|4|2|22|20240.44|0.01|0.02|R|F|1993-02-16|1993-02-16|1993-03-16|TAKE BACK RETURN|MAIL|the express packages. fu|
+3203|144|5|1|23|24015.22|0.01|0.07|N|O|1998-01-04|1998-01-12|1998-01-24|COLLECT COD|SHIP|uses. fluffily ironic pinto bea|
+3203|188|9|2|22|23939.96|0.03|0.03|N|O|1998-02-12|1998-01-01|1998-02-18|TAKE BACK RETURN|REG AIR|e the blithely regular accounts boost f|
+3204|12|2|1|10|9120.10|0.10|0.07|R|F|1993-01-27|1993-03-08|1993-01-29|COLLECT COD|SHIP|counts. bold |
+3204|7|10|2|39|35373.00|0.10|0.03|R|F|1993-02-11|1993-03-19|1993-02-28|TAKE BACK RETURN|MAIL|sits sleep theodolites. slyly bo|
+3205|68|5|1|7|6776.42|0.09|0.00|R|F|1992-07-05|1992-06-17|1992-07-07|NONE|SHIP|ly alongsi|
+3205|29|10|2|32|29728.64|0.08|0.03|A|F|1992-06-01|1992-07-10|1992-06-06|TAKE BACK RETURN|RAIL|lar accoun|
+3205|103|6|3|38|38117.80|0.10|0.08|A|F|1992-07-31|1992-06-03|1992-08-20|DELIVER IN PERSON|AIR|usly quiet accounts. slyly pending pinto |
+3205|56|7|4|10|9560.50|0.01|0.07|A|F|1992-06-18|1992-07-04|1992-07-16|COLLECT COD|RAIL| deposits cajole careful|
+3205|70|9|5|18|17461.26|0.03|0.03|A|F|1992-07-04|1992-06-14|1992-08-03|TAKE BACK RETURN|RAIL|symptotes. slyly even deposits ar|
+3205|195|8|6|19|20808.61|0.07|0.08|R|F|1992-05-28|1992-05-30|1992-06-05|COLLECT COD|AIR|yly pending packages snooz|
+3205|69|8|7|36|34886.16|0.06|0.03|A|F|1992-05-31|1992-06-19|1992-06-03|TAKE BACK RETURN|SHIP|s. ironic platelets above the s|
+3206|176|4|1|1|1076.17|0.07|0.05|N|O|1996-11-22|1996-10-16|1996-12-07|TAKE BACK RETURN|FOB|y unusual foxes cajole ab|
+3206|111|5|2|37|37411.07|0.07|0.01|N|O|1996-09-06|1996-10-31|1996-09-25|COLLECT COD|SHIP| quick theodolites hagg|
+3206|186|7|3|24|26068.32|0.00|0.08|N|O|1996-08-25|1996-10-01|1996-09-04|COLLECT COD|TRUCK|encies sleep deposits--|
+3207|113|3|1|2|2026.22|0.10|0.03|N|O|1998-06-15|1998-04-20|1998-06-21|COLLECT COD|MAIL|among the ironic, even packages |
+3207|71|9|2|42|40784.94|0.00|0.00|N|O|1998-05-02|1998-05-10|1998-06-01|NONE|SHIP|to the quickly special accounts? ironically|
+3207|152|7|3|17|17886.55|0.03|0.04|N|O|1998-03-27|1998-04-06|1998-03-28|COLLECT COD|RAIL|eep against the instructions. gifts hag|
+3207|19|6|4|32|29408.32|0.00|0.03|N|O|1998-06-17|1998-04-26|1998-07-07|TAKE BACK RETURN|SHIP|y across the slyly express foxes. bl|
+3207|83|4|5|8|7864.64|0.00|0.06|N|O|1998-06-13|1998-04-26|1998-07-11|COLLECT COD|SHIP|y. final pint|
+3207|134|5|6|32|33092.16|0.03|0.05|N|O|1998-04-19|1998-05-01|1998-05-08|COLLECT COD|FOB|l deposits wake beyond the carefully|
+3232|14|5|1|22|20108.22|0.10|0.01|A|F|1992-11-30|1992-12-09|1992-12-04|NONE|RAIL|thely. furio|
+3232|135|1|2|34|35194.42|0.07|0.04|R|F|1993-01-09|1992-11-14|1993-02-03|NONE|SHIP|old packages integrate quickly |
+3232|181|2|3|3|3243.54|0.04|0.06|R|F|1992-12-14|1992-12-11|1992-12-29|DELIVER IN PERSON|FOB|ily blithely ironic acco|
+3233|51|2|1|23|21874.15|0.04|0.05|A|F|1994-12-07|1995-01-11|1994-12-26|NONE|AIR|pending instructions use after the carefu|
+3233|154|6|2|6|6324.90|0.02|0.08|A|F|1994-12-06|1994-12-05|1994-12-07|TAKE BACK RETURN|REG AIR|requests are quickly above the slyly p|
+3233|100|4|3|2|2000.20|0.04|0.06|R|F|1995-01-03|1995-01-02|1995-01-21|TAKE BACK RETURN|AIR| across the bold packages|
+3233|9|2|4|25|22725.00|0.04|0.07|A|F|1994-11-24|1995-01-07|1994-12-11|NONE|RAIL|oss the pl|
+3234|79|10|1|45|44058.15|0.01|0.04|N|O|1996-05-15|1996-05-09|1996-06-02|DELIVER IN PERSON|TRUCK| express packages are carefully. f|
+3234|84|5|2|23|22633.84|0.03|0.00|N|O|1996-05-29|1996-05-15|1996-06-17|DELIVER IN PERSON|AIR|d-- fluffily special packag|
+3234|75|4|3|16|15601.12|0.06|0.05|N|O|1996-06-10|1996-05-30|1996-06-18|COLLECT COD|RAIL|ithely ironic accounts wake along t|
+3234|122|1|4|50|51106.00|0.09|0.05|N|O|1996-06-11|1996-05-19|1996-06-18|NONE|MAIL|ly regular ideas according to the regula|
+3234|165|2|5|14|14912.24|0.01|0.07|N|O|1996-04-06|1996-05-30|1996-04-13|NONE|REG AIR|lithely regular f|
+3235|109|2|1|9|9081.90|0.07|0.00|N|O|1995-11-17|1995-12-24|1995-11-30|COLLECT COD|AIR|l courts sleep quickly slyly |
+3235|95|6|2|43|42788.87|0.10|0.07|N|O|1995-12-25|1996-01-23|1996-01-09|COLLECT COD|MAIL|ckly final instru|
+3235|138|9|3|29|30105.77|0.06|0.06|N|O|1996-01-28|1995-12-26|1996-02-12|DELIVER IN PERSON|RAIL|e fluffy pinto bea|
+3235|178|9|4|23|24797.91|0.00|0.01|N|O|1996-02-16|1996-01-05|1996-03-07|DELIVER IN PERSON|SHIP|ldly ironic pinto beans|
+3236|117|4|1|10|10171.10|0.06|0.05|N|O|1996-11-15|1996-12-14|1996-11-29|TAKE BACK RETURN|AIR|arefully. fluffily reg|
+3236|122|7|2|21|21464.52|0.01|0.07|N|O|1996-12-23|1996-12-12|1997-01-21|NONE|AIR| final pinto |
+3236|118|2|3|7|7126.77|0.07|0.01|N|O|1996-12-27|1996-12-18|1997-01-24|DELIVER IN PERSON|SHIP|dolites. slyly unus|
+3237|11|5|1|11|10021.11|0.02|0.07|A|F|1992-08-03|1992-07-31|1992-08-13|TAKE BACK RETURN|AIR|es. permanently express platelets besid|
+3238|72|3|1|12|11664.84|0.06|0.01|R|F|1993-03-06|1993-05-08|1993-04-01|DELIVER IN PERSON|AIR|ackages affix furiously. furiously bol|
+3238|173|2|2|26|27902.42|0.01|0.06|A|F|1993-02-25|1993-04-04|1993-03-20|TAKE BACK RETURN|REG AIR|g accounts sleep furiously ironic attai|
+3238|81|2|3|1|981.08|0.00|0.04|R|F|1993-05-17|1993-04-18|1993-05-27|NONE|SHIP|wake alongs|
+3239|45|8|1|50|47252.00|0.05|0.01|N|O|1998-02-09|1998-04-02|1998-02-22|NONE|FOB|d blithely stea|
+3239|45|8|2|43|40636.72|0.01|0.06|N|O|1998-01-15|1998-03-12|1998-01-29|COLLECT COD|REG AIR|y. bold pinto beans use |
+3239|13|7|3|13|11869.13|0.01|0.05|N|O|1998-02-10|1998-02-19|1998-02-25|DELIVER IN PERSON|MAIL|r deposits solve fluf|
+3239|195|6|4|26|28474.94|0.03|0.05|N|O|1998-01-21|1998-03-21|1998-02-08|DELIVER IN PERSON|SHIP|ngly pending platelets are fluff|
+3239|12|9|5|31|28272.31|0.10|0.08|N|O|1998-04-14|1998-03-24|1998-04-17|DELIVER IN PERSON|FOB|foxes. pendin|
+3264|200|1|1|39|42907.80|0.06|0.06|N|O|1996-11-07|1996-12-12|1996-11-20|TAKE BACK RETURN|REG AIR|sleep carefully after the slyly final|
+3264|131|2|2|34|35058.42|0.00|0.01|N|O|1997-01-03|1997-01-06|1997-01-29|TAKE BACK RETURN|REG AIR|rns haggle carefully. blit|
+3264|125|8|3|11|11276.32|0.09|0.03|N|O|1996-12-11|1996-12-19|1996-12-15|DELIVER IN PERSON|SHIP|regular packages|
+3264|109|10|4|24|24218.40|0.09|0.07|N|O|1997-01-07|1996-12-13|1997-01-11|TAKE BACK RETURN|RAIL|ctions. quick|
+3264|63|4|5|6|5778.36|0.04|0.03|N|O|1996-11-10|1996-12-05|1996-11-22|TAKE BACK RETURN|SHIP|press packages. ironical|
+3264|141|2|6|43|44769.02|0.06|0.06|N|O|1997-01-17|1997-01-24|1997-02-01|TAKE BACK RETURN|TRUCK|leep at the blithely bold|
+3265|25|4|1|8|7400.16|0.06|0.02|A|F|1992-09-01|1992-09-12|1992-09-27|DELIVER IN PERSON|TRUCK|thely ironic requests sleep slyly-- i|
+3265|72|2|2|7|6804.49|0.09|0.00|R|F|1992-09-16|1992-09-04|1992-10-14|DELIVER IN PERSON|MAIL|he forges. fluffily regular asym|
+3265|191|4|3|28|30553.32|0.09|0.08|A|F|1992-10-22|1992-08-23|1992-10-25|NONE|RAIL|n requests. quickly final dinos|
+3266|64|1|1|31|29885.86|0.09|0.02|N|O|1995-06-19|1995-05-04|1995-07-06|COLLECT COD|MAIL|grate among the quickly express deposits|
+3266|38|4|2|43|40335.29|0.06|0.07|R|F|1995-05-04|1995-05-30|1995-05-11|COLLECT COD|AIR|ular asymptotes use careful|
+3267|185|6|1|33|35810.94|0.06|0.01|N|O|1997-03-30|1997-03-25|1997-04-23|TAKE BACK RETURN|AIR|es boost. |
+3268|96|7|1|1|996.09|0.06|0.08|A|F|1994-09-12|1994-08-31|1994-09-16|NONE|TRUCK|. ironic, bold requests use carefull|
+3268|42|9|2|40|37681.60|0.08|0.01|R|F|1994-06-30|1994-08-22|1994-07-25|COLLECT COD|FOB|ly. bold, eve|
+3269|161|10|1|40|42446.40|0.02|0.07|N|O|1996-06-11|1996-05-06|1996-06-15|DELIVER IN PERSON|TRUCK|es. pending d|
+3269|38|4|2|46|43149.38|0.00|0.02|N|O|1996-04-21|1996-04-12|1996-05-10|DELIVER IN PERSON|MAIL|final asymptotes nag|
+3269|44|3|3|39|36817.56|0.02|0.03|N|O|1996-03-13|1996-05-26|1996-03-19|COLLECT COD|MAIL|he express packages?|
+3269|83|4|4|37|36373.96|0.07|0.05|N|O|1996-06-14|1996-04-27|1996-07-07|NONE|MAIL|egular requests. carefully un|
+3269|93|7|5|42|41709.78|0.09|0.05|N|O|1996-03-19|1996-04-24|1996-04-18|COLLECT COD|TRUCK| the special packages. |
+3269|131|7|6|16|16498.08|0.01|0.08|N|O|1996-03-03|1996-04-06|1996-03-06|NONE|RAIL|s cajole. silent deposits are f|
+3270|35|1|1|11|10285.33|0.07|0.06|N|O|1997-07-29|1997-08-11|1997-08-05|TAKE BACK RETURN|AIR| solve at the regular deposits. |
+3270|38|4|2|44|41273.32|0.10|0.05|N|O|1997-07-20|1997-08-15|1997-08-04|DELIVER IN PERSON|SHIP| accounts. carefully even |
+3270|65|4|3|20|19301.20|0.01|0.02|N|O|1997-08-26|1997-07-31|1997-08-30|DELIVER IN PERSON|FOB|en accounts among the c|
+3270|189|10|4|29|31586.22|0.06|0.05|N|O|1997-07-01|1997-07-23|1997-07-10|TAKE BACK RETURN|MAIL|sly regular asymptotes. slyly dog|
+3270|34|10|5|32|29888.96|0.03|0.00|N|O|1997-09-23|1997-08-17|1997-09-27|NONE|REG AIR|promise carefully.|
+3270|57|5|6|29|27754.45|0.01|0.04|N|O|1997-08-22|1997-08-17|1997-09-06|COLLECT COD|RAIL|ptotes nag above the quickly bold deposits|
+3270|117|1|7|9|9153.99|0.06|0.08|N|O|1997-08-14|1997-08-11|1997-09-09|DELIVER IN PERSON|SHIP|ual packages|
+3271|57|9|1|30|28711.50|0.01|0.04|A|F|1992-01-16|1992-03-20|1992-01-17|DELIVER IN PERSON|AIR|r the unusual Tiresia|
+3271|54|5|2|18|17172.90|0.09|0.06|R|F|1992-05-01|1992-03-28|1992-05-29|DELIVER IN PERSON|FOB| packages eat around the furiously regul|
+3271|95|6|3|14|13931.26|0.05|0.01|A|F|1992-02-24|1992-02-14|1992-03-23|NONE|AIR|ending, even packa|
+3271|64|1|4|29|27957.74|0.07|0.04|A|F|1992-03-10|1992-02-05|1992-03-14|COLLECT COD|MAIL|lar instructions. carefully regular|
+3296|84|5|1|12|11808.96|0.06|0.07|R|F|1994-12-08|1994-12-14|1994-12-24|COLLECT COD|AIR|y about the slyly bold pinto bea|
+3296|149|8|2|31|32523.34|0.08|0.00|R|F|1995-01-26|1994-12-25|1995-02-16|NONE|REG AIR|ainst the furi|
+3296|185|6|3|29|31470.22|0.02|0.04|A|F|1995-01-12|1994-11-26|1995-02-06|DELIVER IN PERSON|SHIP|ss ideas are reg|
+3296|140|1|4|47|48886.58|0.06|0.00|A|F|1994-11-08|1994-12-20|1994-11-30|NONE|FOB|egular deposits. quic|
+3296|177|6|5|16|17234.72|0.06|0.02|R|F|1995-01-11|1994-12-27|1995-01-12|DELIVER IN PERSON|SHIP|kages cajole carefully |
+3296|197|1|6|40|43887.60|0.00|0.04|A|F|1994-12-28|1994-12-08|1995-01-13|COLLECT COD|REG AIR|ronic ideas across the|
+3296|36|2|7|6|5616.18|0.02|0.01|R|F|1995-01-03|1994-12-23|1995-01-27|TAKE BACK RETURN|AIR|carefully fur|
+3297|134|10|1|10|10341.30|0.10|0.04|A|F|1992-12-14|1993-01-21|1992-12-26|NONE|SHIP|ironic idea|
+3298|149|6|1|9|9442.26|0.01|0.06|N|O|1996-08-15|1996-05-24|1996-09-12|COLLECT COD|REG AIR|ly final accou|
+3298|186|7|2|27|29326.86|0.06|0.06|N|O|1996-07-10|1996-05-21|1996-07-15|DELIVER IN PERSON|FOB|lar packages. regular deposit|
+3298|29|2|3|25|23225.50|0.10|0.08|N|O|1996-06-30|1996-05-31|1996-07-23|COLLECT COD|SHIP|ly express f|
+3298|191|5|4|1|1091.19|0.10|0.03|N|O|1996-07-31|1996-05-23|1996-08-24|TAKE BACK RETURN|FOB|refully regular requ|
+3299|183|4|1|40|43327.20|0.03|0.02|A|F|1994-03-21|1994-03-23|1994-04-12|COLLECT COD|AIR|lyly even request|
+3300|129|4|1|3|3087.36|0.07|0.02|N|O|1995-11-01|1995-10-02|1995-11-20|NONE|REG AIR|g according to the dugouts. caref|
+3300|149|10|2|23|24130.22|0.02|0.02|N|O|1995-08-17|1995-09-03|1995-09-04|COLLECT COD|TRUCK|he fluffily final a|
+3301|169|8|1|45|48112.20|0.04|0.05|A|F|1994-11-19|1994-10-27|1994-11-24|TAKE BACK RETURN|FOB|nusual, final excuses after the entici|
+3302|36|2|1|45|42121.35|0.09|0.00|N|O|1996-01-24|1995-12-16|1996-02-13|COLLECT COD|FOB|counts use quickl|
+3303|184|5|1|25|27104.50|0.06|0.01|N|O|1998-03-25|1998-01-31|1998-04-12|NONE|SHIP|lly regular pi|
+3303|21|2|2|15|13815.30|0.04|0.06|N|O|1998-01-29|1998-01-22|1998-02-21|COLLECT COD|SHIP| detect sly|
+3303|99|10|3|37|36966.33|0.05|0.02|N|O|1998-02-16|1998-03-07|1998-02-18|TAKE BACK RETURN|TRUCK| carefully ironic asympt|
+3303|36|2|4|26|24336.78|0.09|0.00|N|O|1998-01-18|1998-03-11|1998-02-11|DELIVER IN PERSON|REG AIR|ickly permanent requests w|
+3328|113|7|1|6|6078.66|0.03|0.08|A|F|1993-03-07|1993-01-25|1993-03-29|COLLECT COD|TRUCK|ffily even instructions detect b|
+3328|5|2|2|23|20815.00|0.01|0.06|R|F|1993-01-12|1993-02-07|1993-01-30|TAKE BACK RETURN|MAIL|y. careful|
+3328|139|10|3|44|45721.72|0.05|0.00|R|F|1992-12-03|1992-12-19|1992-12-09|TAKE BACK RETURN|FOB|dly quickly final foxes? re|
+3328|95|9|4|42|41793.78|0.01|0.05|R|F|1992-11-24|1992-12-20|1992-12-06|DELIVER IN PERSON|AIR|ronic requests|
+3328|131|7|5|25|25778.25|0.05|0.00|R|F|1993-01-28|1993-01-04|1993-01-31|NONE|RAIL|e unusual, r|
+3329|138|4|1|36|37372.68|0.09|0.08|N|O|1995-08-06|1995-08-03|1995-08-14|DELIVER IN PERSON|TRUCK|ts at the re|
+3329|6|3|2|9|8154.00|0.00|0.02|N|O|1995-07-24|1995-08-02|1995-08-01|COLLECT COD|MAIL|lly final depo|
+3329|123|4|3|1|1023.12|0.04|0.08|N|O|1995-08-22|1995-09-28|1995-09-09|COLLECT COD|REG AIR|regular packages are carefull|
+3330|20|7|1|49|45080.98|0.05|0.01|R|F|1995-03-02|1995-03-03|1995-03-16|DELIVER IN PERSON|TRUCK|haggle carefully alongside of the bold r|
+3331|64|9|1|9|8676.54|0.08|0.07|A|F|1993-07-18|1993-07-03|1993-08-16|TAKE BACK RETURN|AIR|odolites. bold accounts|
+3331|21|2|2|38|34998.76|0.06|0.04|R|F|1993-07-24|1993-06-22|1993-08-23|NONE|AIR|ymptotes haggle across the ca|
+3331|3|10|3|26|23478.00|0.09|0.05|A|F|1993-08-05|1993-07-17|1993-08-29|DELIVER IN PERSON|MAIL|p asymptotes. carefully unusual in|
+3332|84|5|1|28|27554.24|0.10|0.02|R|F|1994-12-30|1995-01-16|1995-01-16|COLLECT COD|FOB|s against the carefully special multipl|
+3332|136|2|2|21|21758.73|0.08|0.04|R|F|1995-02-04|1995-01-08|1995-02-06|COLLECT COD|MAIL| quick packages sle|
+3332|134|5|3|27|27921.51|0.03|0.02|A|F|1994-12-10|1995-01-14|1994-12-11|TAKE BACK RETURN|FOB|ording to the slyly regula|
+3333|150|9|1|27|28354.05|0.06|0.08|A|F|1992-12-06|1992-10-26|1992-12-07|COLLECT COD|SHIP|s dazzle fluffil|
+3333|199|3|2|36|39570.84|0.08|0.07|R|F|1992-11-20|1992-11-06|1992-12-16|TAKE BACK RETURN|FOB|foxes sleep neve|
+3333|108|1|3|38|38307.80|0.05|0.05|A|F|1992-10-30|1992-11-03|1992-11-04|NONE|MAIL|ccounts promise bl|
+3333|113|4|4|49|49642.39|0.07|0.07|R|F|1992-10-02|1992-11-30|1992-10-12|DELIVER IN PERSON|MAIL|riously ironic r|
+3333|43|2|5|45|42436.80|0.07|0.08|A|F|1992-10-04|1992-11-08|1992-10-27|COLLECT COD|SHIP|dolites. quickly r|
+3334|187|8|1|20|21743.60|0.04|0.03|N|O|1996-05-21|1996-04-08|1996-05-26|TAKE BACK RETURN|AIR|uses nag furiously. instructions are ca|
+3334|190|1|2|7|7631.33|0.09|0.07|N|O|1996-04-28|1996-04-08|1996-05-25|NONE|SHIP|nts sublate slyly express pack|
+3335|105|10|1|13|13066.30|0.06|0.07|N|O|1996-01-20|1995-12-20|1996-02-09|COLLECT COD|REG AIR|out the special asymptotes|
+3335|31|2|2|44|40965.32|0.07|0.02|N|O|1996-01-05|1995-12-25|1996-01-18|DELIVER IN PERSON|SHIP|r packages cajole ac|
+3335|140|6|3|16|16642.24|0.01|0.06|N|O|1995-10-18|1995-12-08|1995-11-03|DELIVER IN PERSON|SHIP|g packages. carefully regular reque|
+3335|90|1|4|47|46534.23|0.10|0.03|N|O|1995-12-02|1995-11-19|1995-12-27|NONE|MAIL| quickly special ideas.|
+3360|174|4|1|31|33299.27|0.08|0.04|N|O|1998-04-24|1998-04-12|1998-05-23|COLLECT COD|REG AIR|quests. carefully even deposits wake acros|
+3360|91|3|2|29|28741.61|0.00|0.06|N|O|1998-04-15|1998-02-25|1998-05-13|TAKE BACK RETURN|FOB|press asymptotes. furiously final |
+3360|82|3|3|39|38301.12|0.08|0.03|N|O|1998-04-09|1998-04-20|1998-05-05|DELIVER IN PERSON|REG AIR|s. blithely express pinto bean|
+3360|117|7|4|29|29496.19|0.10|0.01|N|O|1998-05-19|1998-03-03|1998-06-09|TAKE BACK RETURN|FOB|hely gifts. spe|
+3360|58|6|5|4|3832.20|0.08|0.07|N|O|1998-02-27|1998-03-23|1998-03-28|COLLECT COD|SHIP|ly busy inst|
+3360|71|1|6|42|40784.94|0.04|0.01|N|O|1998-05-07|1998-04-18|1998-06-04|DELIVER IN PERSON|FOB|ages cajole. pending, |
+3361|144|5|1|6|6264.84|0.02|0.02|R|F|1992-10-02|1992-10-25|1992-10-05|DELIVER IN PERSON|FOB| packages sleep. furiously unus|
+3361|171|10|2|33|35348.61|0.01|0.02|R|F|1992-11-09|1992-10-15|1992-11-11|TAKE BACK RETURN|MAIL|uriously ironic accounts. ironic, ir|
+3361|191|5|3|31|33826.89|0.06|0.04|R|F|1992-08-29|1992-10-13|1992-09-08|NONE|FOB|ts. pending, regular accounts sleep fur|
+3362|22|5|1|14|12908.28|0.06|0.05|N|O|1995-08-01|1995-09-06|1995-08-22|NONE|FOB|even Tires|
+3362|195|6|2|41|44902.79|0.05|0.03|N|O|1995-10-31|1995-09-04|1995-11-17|COLLECT COD|REG AIR|ake alongside of the |
+3362|115|9|3|40|40604.40|0.05|0.06|N|O|1995-08-19|1995-10-17|1995-09-05|TAKE BACK RETURN|FOB|packages haggle furi|
+3362|2|7|4|3|2706.00|0.03|0.01|N|O|1995-08-26|1995-09-02|1995-09-17|NONE|SHIP|its cajole blithely excuses. de|
+3362|138|9|5|36|37372.68|0.06|0.00|N|O|1995-10-05|1995-08-28|1995-11-03|TAKE BACK RETURN|RAIL|es against the quickly permanent pint|
+3362|188|9|6|46|50056.28|0.09|0.05|N|O|1995-08-02|1995-10-12|1995-08-28|COLLECT COD|REG AIR|ly bold packages. regular deposits cajol|
+3363|10|3|1|42|38220.42|0.00|0.08|N|O|1995-11-09|1995-11-25|1995-11-15|TAKE BACK RETURN|RAIL| blithely final ideas nag after|
+3363|191|4|2|21|22914.99|0.08|0.08|N|O|1995-12-10|1995-10-28|1995-12-28|COLLECT COD|RAIL|he regular, brave deposits. f|
+3363|159|7|3|2|2118.30|0.01|0.07|N|O|1996-01-22|1995-12-01|1996-02-18|TAKE BACK RETURN|SHIP|uickly bold ide|
+3363|113|3|4|20|20262.20|0.07|0.06|N|O|1995-12-11|1995-11-15|1995-12-21|COLLECT COD|MAIL|carefully quiet excuses wake. sl|
+3363|200|4|5|4|4400.80|0.00|0.08|N|O|1995-10-30|1995-11-17|1995-11-22|COLLECT COD|FOB| ironic dependencie|
+3364|90|1|1|49|48514.41|0.03|0.05|N|O|1997-09-17|1997-08-23|1997-10-06|NONE|SHIP|d accounts? caref|
+3364|111|2|2|38|38422.18|0.02|0.02|N|O|1997-08-30|1997-09-12|1997-09-27|COLLECT COD|REG AIR| slyly express|
+3364|156|4|3|10|10561.50|0.00|0.01|N|O|1997-08-10|1997-08-24|1997-08-15|TAKE BACK RETURN|SHIP|g the accounts. final, busy accounts wi|
+3364|160|5|4|7|7421.12|0.10|0.05|N|O|1997-07-09|1997-08-01|1997-07-16|NONE|TRUCK|furiously regular ideas haggle furiously b|
+3364|81|2|5|3|2943.24|0.01|0.00|N|O|1997-10-19|1997-08-15|1997-10-28|TAKE BACK RETURN|TRUCK|c theodolites. blithely ir|
+3365|151|6|1|37|38892.55|0.02|0.08|R|F|1994-12-22|1995-02-07|1995-01-20|TAKE BACK RETURN|SHIP|requests. quickly pending instructions a|
+3365|167|2|2|37|39484.92|0.07|0.08|A|F|1994-11-24|1995-01-09|1994-11-27|NONE|REG AIR|oze blithely. furiously ironic theodolit|
+3365|115|6|3|13|13196.43|0.09|0.02|R|F|1995-02-25|1995-01-31|1995-03-16|NONE|RAIL|pths wake r|
+3365|176|4|4|49|52732.33|0.02|0.07|R|F|1995-01-03|1995-01-01|1995-01-18|COLLECT COD|MAIL|lyly unusual asymptotes. final|
+3365|16|3|5|2|1832.02|0.00|0.03|R|F|1995-02-04|1994-12-30|1995-03-06|TAKE BACK RETURN|FOB|es cajole fluffily pe|
+3365|126|5|6|24|24626.88|0.01|0.00|R|F|1995-02-27|1995-01-09|1995-03-27|DELIVER IN PERSON|REG AIR|into beans? carefully regula|
+3366|40|1|1|4|3760.16|0.07|0.01|N|O|1997-05-20|1997-06-25|1997-06-03|DELIVER IN PERSON|AIR| carefully about |
+3366|136|2|2|9|9325.17|0.00|0.08|N|O|1997-06-02|1997-07-05|1997-06-26|COLLECT COD|REG AIR|ackages sleep carefully across the bli|
+3367|41|10|1|27|25408.08|0.01|0.03|A|F|1993-04-13|1993-03-16|1993-04-26|NONE|RAIL|kly even instructions caj|
+3367|141|10|2|34|35398.76|0.04|0.08|A|F|1993-03-30|1993-02-23|1993-04-11|COLLECT COD|MAIL| accounts wake slyly |
+3367|120|7|3|38|38764.56|0.03|0.03|R|F|1993-03-13|1993-02-12|1993-03-31|NONE|RAIL|even packages sleep blithely slyly expr|
+3392|171|10|1|40|42846.80|0.01|0.01|N|O|1996-02-18|1995-12-16|1996-02-26|COLLECT COD|MAIL|ress instructions affix carefully. fur|
+3392|123|2|2|13|13300.56|0.09|0.02|N|O|1995-11-26|1996-01-17|1995-12-01|NONE|MAIL|across the fluffily bold deposits.|
+3392|127|10|3|34|34922.08|0.10|0.08|N|O|1996-01-20|1996-01-21|1996-01-24|DELIVER IN PERSON|MAIL|e carefully even braids. |
+3392|124|3|4|7|7168.84|0.08|0.05|N|O|1995-12-07|1996-01-09|1995-12-29|TAKE BACK RETURN|RAIL|as. express, final accounts dou|
+3393|117|7|1|16|16273.76|0.01|0.00|N|O|1995-07-17|1995-08-19|1995-08-04|COLLECT COD|TRUCK|uses. instructions after the blithely |
+3393|125|4|2|44|45105.28|0.08|0.04|N|O|1995-10-16|1995-08-05|1995-11-01|NONE|AIR|ld requests hag|
+3393|97|1|3|25|24927.25|0.07|0.02|N|O|1995-10-17|1995-08-12|1995-11-11|DELIVER IN PERSON|MAIL|ng excuses|
+3393|72|2|4|48|46659.36|0.06|0.06|N|O|1995-07-12|1995-09-15|1995-08-02|NONE|FOB| blithely final reques|
+3393|178|7|5|37|39892.29|0.07|0.02|N|O|1995-10-16|1995-08-19|1995-10-19|COLLECT COD|AIR|ss the slyly ironic pinto beans. ironic,|
+3393|62|7|6|17|16355.02|0.04|0.01|N|O|1995-08-15|1995-09-07|1995-09-10|COLLECT COD|MAIL|kly ironic deposits could|
+3394|155|6|1|33|34819.95|0.07|0.08|N|O|1996-08-07|1996-07-17|1996-09-02|TAKE BACK RETURN|SHIP|ideas alongside of th|
+3394|146|3|2|43|44984.02|0.08|0.03|N|O|1996-08-23|1996-07-20|1996-08-25|COLLECT COD|RAIL|hockey players. slyly regular requests afte|
+3394|88|9|3|26|25690.08|0.01|0.00|N|O|1996-08-08|1996-06-12|1996-09-05|TAKE BACK RETURN|RAIL|its use furiously. even, even account|
+3394|81|2|4|14|13735.12|0.08|0.00|N|O|1996-06-02|1996-07-02|1996-06-19|COLLECT COD|MAIL|e furiously final theodolites. furio|
+3394|127|8|5|30|30813.60|0.04|0.06|N|O|1996-05-12|1996-07-24|1996-05-19|COLLECT COD|REG AIR|t ideas according to the fluffily iro|
+3394|184|5|6|14|15178.52|0.05|0.05|N|O|1996-06-18|1996-06-24|1996-07-17|NONE|REG AIR|arefully regular do|
+3395|142|3|1|21|21884.94|0.03|0.06|R|F|1994-12-19|1995-01-13|1994-12-25|TAKE BACK RETURN|SHIP| careful dep|
+3395|36|2|2|38|35569.14|0.01|0.07|R|F|1995-01-13|1995-01-13|1995-01-25|COLLECT COD|SHIP| silent accounts are blithely|
+3395|43|4|3|43|40550.72|0.06|0.07|A|F|1994-12-13|1995-01-07|1994-12-14|COLLECT COD|AIR|ckages above the furiously regu|
+3395|122|1|4|39|39862.68|0.05|0.07|R|F|1994-12-03|1995-01-17|1994-12-10|NONE|AIR|riously unusual theodolites. fur|
+3396|128|7|1|34|34956.08|0.00|0.06|A|F|1994-05-30|1994-08-16|1994-06-11|NONE|AIR|. slyly unusual packages wak|
+3396|49|6|2|43|40808.72|0.03|0.08|A|F|1994-07-03|1994-08-09|1994-07-14|TAKE BACK RETURN|MAIL|cial packages cajole blithely around the |
+3396|138|4|3|9|9343.17|0.01|0.06|R|F|1994-07-01|1994-08-18|1994-07-21|DELIVER IN PERSON|AIR|usly special foxes. accounts wake careful|
+3396|75|3|4|32|31202.24|0.06|0.02|R|F|1994-08-07|1994-08-10|1994-09-05|COLLECT COD|TRUCK|osits are slyly. final, bold foxes s|
+3396|126|5|5|27|27705.24|0.02|0.01|A|F|1994-09-14|1994-07-26|1994-09-28|DELIVER IN PERSON|FOB| theodolites |
+3396|39|10|6|18|16902.54|0.10|0.00|A|F|1994-07-27|1994-06-26|1994-08-25|TAKE BACK RETURN|REG AIR|l requests haggle furiously along the fur|
+3396|198|2|7|31|34043.89|0.05|0.06|A|F|1994-06-07|1994-06-23|1994-06-19|TAKE BACK RETURN|REG AIR|l, express pinto beans. quic|
+3397|195|8|1|8|8761.52|0.07|0.01|A|F|1994-08-05|1994-08-11|1994-08-08|DELIVER IN PERSON|RAIL|y final foxes|
+3397|13|3|2|11|10043.11|0.00|0.07|A|F|1994-07-29|1994-09-18|1994-08-12|DELIVER IN PERSON|REG AIR|iously careful packages. s|
+3397|184|5|3|1|1084.18|0.07|0.05|R|F|1994-08-03|1994-07-30|1994-08-28|NONE|RAIL| regular packag|
+3397|86|7|4|33|32540.64|0.05|0.01|R|F|1994-09-04|1994-08-06|1994-09-22|COLLECT COD|RAIL|gular accounts. blithely re|
+3397|132|3|5|28|28899.64|0.05|0.05|R|F|1994-07-13|1994-08-26|1994-07-17|NONE|TRUCK|counts around the final reques|
+3398|173|4|1|1|1073.17|0.01|0.08|N|O|1996-11-22|1996-11-16|1996-12-09|COLLECT COD|MAIL| blithely final deposits.|
+3399|134|5|1|28|28955.64|0.09|0.05|N|O|1995-06-29|1995-05-19|1995-07-12|COLLECT COD|AIR|oggedly final theodolites grow. fi|
+3399|55|6|2|8|7640.40|0.01|0.05|A|F|1995-05-15|1995-04-19|1995-06-05|COLLECT COD|TRUCK|s use carefully carefully ir|
+3399|67|4|3|3|2901.18|0.03|0.00|N|F|1995-06-16|1995-04-04|1995-06-23|NONE|SHIP|hely pending dugouts |
+3399|14|5|4|21|19194.21|0.09|0.06|A|F|1995-03-12|1995-05-18|1995-03-28|TAKE BACK RETURN|MAIL|se final courts. exc|
+3424|181|2|1|39|42166.02|0.06|0.07|N|O|1996-11-03|1996-11-08|1996-11-23|DELIVER IN PERSON|MAIL|bits boost closely slyly p|
+3425|120|1|1|11|11221.32|0.03|0.08|N|O|1996-04-24|1996-05-29|1996-05-23|DELIVER IN PERSON|FOB|ckly final deposits use quickly?|
+3425|79|7|2|37|36225.59|0.06|0.03|N|O|1996-06-04|1996-05-09|1996-06-12|NONE|SHIP|as sleep carefully into the caref|
+3425|14|4|3|8|7312.08|0.06|0.08|N|O|1996-07-22|1996-06-07|1996-07-26|TAKE BACK RETURN|AIR|iously regular theodolites wake. s|
+3425|19|10|4|37|34003.37|0.04|0.01|N|O|1996-07-10|1996-05-10|1996-08-02|NONE|SHIP|ngside of the furiously thin dol|
+3425|79|9|5|48|46995.36|0.08|0.04|N|O|1996-04-14|1996-05-25|1996-04-23|TAKE BACK RETURN|AIR|uctions wake fluffily. care|
+3425|148|9|6|24|25155.36|0.05|0.04|N|O|1996-04-22|1996-06-24|1996-04-25|TAKE BACK RETURN|AIR|ajole blithely sl|
+3426|110|5|1|20|20202.20|0.05|0.04|N|O|1996-11-10|1996-12-24|1996-12-01|COLLECT COD|FOB|sits cajole blit|
+3426|14|4|2|19|17366.19|0.10|0.08|N|O|1996-11-02|1997-01-13|1996-11-15|DELIVER IN PERSON|RAIL|slyly special packages oug|
+3426|67|6|3|19|18374.14|0.08|0.05|N|O|1996-12-07|1996-12-15|1996-12-14|DELIVER IN PERSON|FOB|c accounts cajole carefu|
+3426|6|7|4|9|8154.00|0.09|0.05|N|O|1996-12-24|1997-01-14|1997-01-13|NONE|FOB|pecial theodolites haggle fluf|
+3426|49|6|5|31|29420.24|0.07|0.08|N|O|1996-11-11|1996-12-10|1996-12-10|DELIVER IN PERSON|SHIP| even sentiment|
+3427|54|5|1|41|39116.05|0.10|0.01|N|O|1997-09-11|1997-07-03|1997-10-04|COLLECT COD|RAIL|s the carefully|
+3427|189|10|2|24|26140.32|0.02|0.04|N|O|1997-07-01|1997-07-28|1997-07-30|NONE|SHIP|y bold, sly deposits. pendi|
+3427|139|5|3|40|41565.20|0.06|0.05|N|O|1997-06-12|1997-08-19|1997-06-23|COLLECT COD|MAIL|patterns cajole ca|
+3427|119|6|4|31|31592.41|0.08|0.04|N|O|1997-08-12|1997-07-26|1997-08-25|COLLECT COD|RAIL|s are carefull|
+3428|198|9|1|4|4392.76|0.00|0.03|N|O|1996-05-09|1996-06-13|1996-06-02|NONE|REG AIR|sly pending requests int|
+3428|118|9|2|35|35633.85|0.02|0.03|N|O|1996-05-01|1996-06-07|1996-05-20|COLLECT COD|TRUCK|ly regular pinto beans sleep|
+3428|136|7|3|47|48698.11|0.07|0.05|N|O|1996-04-16|1996-06-08|1996-05-05|NONE|REG AIR|y final pinto |
+3429|137|8|1|48|49782.24|0.06|0.02|N|O|1997-04-08|1997-03-09|1997-04-25|TAKE BACK RETURN|SHIP| haggle furiously ir|
+3429|59|7|2|15|14385.75|0.03|0.04|N|O|1997-02-04|1997-03-09|1997-03-01|TAKE BACK RETURN|TRUCK|beans are fu|
+3429|69|4|3|10|9690.60|0.05|0.07|N|O|1997-01-19|1997-02-22|1997-01-25|TAKE BACK RETURN|REG AIR|ackages. quickly e|
+3429|89|10|4|28|27694.24|0.10|0.07|N|O|1997-01-30|1997-03-18|1997-02-17|TAKE BACK RETURN|AIR|nstructions boost. thin|
+3429|165|6|5|45|47932.20|0.10|0.00|N|O|1997-04-21|1997-03-08|1997-05-05|COLLECT COD|REG AIR|ites poach a|
+3430|189|10|1|2|2178.36|0.07|0.06|R|F|1995-03-07|1995-01-28|1995-03-30|TAKE BACK RETURN|MAIL|sh furiously according to the evenly e|
+3430|81|2|2|32|31394.56|0.08|0.00|R|F|1995-01-17|1995-01-28|1995-02-06|NONE|TRUCK|egular instruction|
+3430|97|8|3|41|40880.69|0.06|0.04|R|F|1995-02-18|1995-02-21|1995-03-11|TAKE BACK RETURN|AIR|cuses. silent excuses h|
+3430|65|2|4|50|48253.00|0.01|0.00|R|F|1994-12-15|1995-03-03|1994-12-24|COLLECT COD|REG AIR|ironic theodolites. carefully regular pac|
+3430|95|9|5|5|4975.45|0.05|0.05|A|F|1995-04-02|1995-02-12|1995-04-08|DELIVER IN PERSON|FOB|even accounts haggle slyly bol|
+3430|171|10|6|15|16067.55|0.08|0.07|A|F|1995-02-01|1995-03-12|1995-02-04|COLLECT COD|SHIP|cajole around the accounts. qui|
+3430|52|7|7|23|21897.15|0.09|0.08|A|F|1995-03-06|1995-03-01|1995-03-10|COLLECT COD|MAIL|eas according to the|
+3431|180|8|1|41|44287.38|0.03|0.06|A|F|1993-09-26|1993-10-13|1993-10-22|NONE|AIR| sleep carefully ironically special|
+3456|111|8|1|34|34377.74|0.10|0.06|A|F|1993-08-29|1993-08-26|1993-09-07|TAKE BACK RETURN|SHIP|usy pinto beans b|
+3457|182|3|1|29|31383.22|0.03|0.02|R|F|1995-05-12|1995-07-13|1995-06-05|NONE|TRUCK|refully final excuses wake|
+3457|106|7|2|22|22134.20|0.06|0.01|N|O|1995-06-23|1995-06-16|1995-06-29|NONE|SHIP|packages nag furiously against|
+3457|109|2|3|7|7063.70|0.07|0.08|N|O|1995-08-14|1995-07-06|1995-08-18|COLLECT COD|SHIP| pending accounts along the|
+3457|1|2|4|24|21624.00|0.07|0.07|N|O|1995-08-03|1995-05-30|1995-08-14|TAKE BACK RETURN|REG AIR|tructions haggle alongsid|
+3457|109|4|5|42|42382.20|0.05|0.01|A|F|1995-06-12|1995-06-14|1995-06-14|COLLECT COD|MAIL|riously final instruc|
+3457|144|1|6|45|46986.30|0.08|0.01|N|O|1995-08-12|1995-07-18|1995-08-23|TAKE BACK RETURN|SHIP| packages. care|
+3457|167|4|7|9|9604.44|0.04|0.00|R|F|1995-05-29|1995-06-30|1995-06-12|DELIVER IN PERSON|FOB|quests. foxes sleep quickly|
+3458|133|4|1|48|49590.24|0.06|0.04|R|F|1995-03-17|1995-01-25|1995-03-28|TAKE BACK RETURN|AIR|iously pending dep|
+3458|50|3|2|46|43702.30|0.06|0.06|R|F|1995-03-08|1995-01-21|1995-03-10|TAKE BACK RETURN|SHIP|nod across the boldly even instruct|
+3458|143|4|3|36|37553.04|0.01|0.06|R|F|1995-04-20|1995-02-14|1995-05-09|TAKE BACK RETURN|REG AIR|s lose. blithely ironic requests boost|
+3458|16|10|4|16|14656.16|0.09|0.03|R|F|1995-03-01|1995-02-25|1995-03-06|TAKE BACK RETURN|AIR|s grow carefully. express, final grouc|
+3458|157|5|5|2|2114.30|0.09|0.03|A|F|1995-02-05|1995-02-01|1995-03-07|COLLECT COD|FOB|ironic packages haggle past the furiously |
+3458|142|1|6|6|6252.84|0.09|0.04|A|F|1995-03-10|1995-02-02|1995-03-23|TAKE BACK RETURN|AIR|dolites; regular theodolites cajole |
+3459|179|7|1|31|33454.27|0.06|0.01|A|F|1994-09-05|1994-10-20|1994-10-03|NONE|REG AIR|y regular pain|
+3459|130|9|2|30|30903.90|0.04|0.08|R|F|1994-11-22|1994-09-12|1994-12-11|NONE|REG AIR|nic theodolites; evenly i|
+3459|41|8|3|45|42346.80|0.04|0.05|A|F|1994-07-31|1994-09-09|1994-08-02|TAKE BACK RETURN|REG AIR|ntly speci|
+3459|69|10|4|10|9690.60|0.05|0.06|A|F|1994-10-06|1994-09-16|1994-11-03|TAKE BACK RETURN|REG AIR| furiously silent dolphi|
+3459|189|10|5|10|10891.80|0.02|0.02|R|F|1994-08-01|1994-10-17|1994-08-11|TAKE BACK RETURN|FOB|. blithely ironic pinto beans above|
+3460|11|1|1|40|36440.40|0.10|0.06|N|O|1995-12-28|1995-12-14|1996-01-02|NONE|REG AIR|odolites are slyly bold deposits|
+3460|74|4|2|3|2922.21|0.06|0.00|N|O|1996-01-19|1995-12-28|1996-01-31|COLLECT COD|AIR|er quickly |
+3460|35|1|3|40|37401.20|0.08|0.07|N|O|1995-10-29|1995-11-10|1995-11-24|TAKE BACK RETURN|REG AIR|o the even deposits|
+3460|95|8|4|50|49754.50|0.02|0.07|N|O|1996-01-30|1995-12-10|1996-02-06|DELIVER IN PERSON|SHIP|e slyly about the sly|
+3460|130|1|5|47|48416.11|0.08|0.05|N|O|1995-12-09|1995-11-12|1995-12-22|TAKE BACK RETURN|SHIP|es haggle slyly regular accounts. fi|
+3460|63|10|6|46|44300.76|0.03|0.07|N|O|1996-01-27|1996-01-01|1996-02-01|NONE|TRUCK|uses run among the carefully even deposits|
+3460|45|2|7|28|26461.12|0.00|0.01|N|O|1995-10-28|1995-11-13|1995-11-17|COLLECT COD|SHIP|inal, ironic instructions. carefully|
+3461|100|4|1|49|49004.90|0.06|0.06|A|F|1993-03-09|1993-04-16|1993-03-13|DELIVER IN PERSON|RAIL|ual request|
+3461|63|4|2|27|26002.62|0.06|0.06|A|F|1993-02-10|1993-03-02|1993-03-04|COLLECT COD|SHIP|ely unusual deposits. quickly ir|
+3461|39|5|3|44|41317.32|0.09|0.06|A|F|1993-05-20|1993-04-03|1993-05-27|COLLECT COD|RAIL| haggle quickly even ideas. fin|
+3461|95|7|4|41|40798.69|0.09|0.02|R|F|1993-02-19|1993-04-20|1993-02-21|NONE|TRUCK|heodolites. blithely ironi|
+3461|90|1|5|16|15841.44|0.08|0.06|A|F|1993-05-09|1993-04-29|1993-05-26|TAKE BACK RETURN|TRUCK| pending deposi|
+3461|167|2|6|24|25611.84|0.10|0.00|A|F|1993-06-01|1993-03-12|1993-06-20|TAKE BACK RETURN|MAIL|thely. carefully re|
+3462|151|3|1|4|4204.60|0.09|0.04|N|O|1997-06-12|1997-07-31|1997-06-16|COLLECT COD|RAIL|ackages. fu|
+3462|40|1|2|43|40421.72|0.08|0.03|N|O|1997-08-01|1997-07-18|1997-08-29|NONE|RAIL| carefully. final, final ideas sleep slyly|
+3462|129|4|3|6|6174.72|0.05|0.04|N|O|1997-06-02|1997-08-09|1997-06-30|NONE|RAIL|iously regular fo|
+3462|99|3|4|2|1998.18|0.09|0.07|N|O|1997-09-10|1997-08-08|1997-09-19|NONE|AIR|nic packages. even accounts alongside |
+3462|38|4|5|14|13132.42|0.01|0.02|N|O|1997-05-31|1997-07-05|1997-06-24|COLLECT COD|MAIL|yly. blithely bold theodolites wa|
+3463|61|10|1|45|43247.70|0.02|0.02|A|F|1993-10-30|1993-11-04|1993-11-08|DELIVER IN PERSON|FOB|nts are slyly |
+3463|98|1|2|43|42917.87|0.04|0.02|A|F|1993-10-28|1993-09-24|1993-11-03|DELIVER IN PERSON|FOB| across the |
+3488|160|5|1|1|1060.16|0.04|0.01|A|F|1995-03-06|1995-02-16|1995-03-23|DELIVER IN PERSON|FOB| final excuses. carefully even waters hagg|
+3488|104|9|2|48|48196.80|0.00|0.03|A|F|1995-03-29|1995-03-26|1995-04-28|COLLECT COD|SHIP|sly? final requests |
+3488|160|1|3|11|11661.76|0.03|0.08|R|F|1995-03-25|1995-02-08|1995-04-16|COLLECT COD|TRUCK|unusual re|
+3488|42|9|4|12|11304.48|0.05|0.07|R|F|1995-04-27|1995-02-16|1995-05-09|DELIVER IN PERSON|RAIL|e slyly; furiously final packages wak|
+3488|156|1|5|18|19010.70|0.09|0.06|A|F|1995-03-18|1995-03-19|1995-03-29|DELIVER IN PERSON|FOB|s the carefully r|
+3489|186|7|1|19|20637.42|0.09|0.05|A|F|1993-07-31|1993-10-26|1993-08-15|NONE|SHIP|c deposits alongside of the pending, fu|
+3489|29|4|2|46|42734.92|0.00|0.00|A|F|1993-08-02|1993-10-09|1993-08-10|TAKE BACK RETURN|TRUCK|xcuses? quickly stealthy dependenci|
+3490|92|6|1|43|42659.87|0.05|0.05|N|O|1997-08-04|1997-08-06|1997-08-14|TAKE BACK RETURN|SHIP|. even requests cajol|
+3490|86|7|2|50|49304.00|0.05|0.07|N|O|1997-06-27|1997-08-15|1997-06-28|NONE|RAIL| haggle carefu|
+3490|93|7|3|8|7944.72|0.10|0.04|N|O|1997-08-11|1997-07-25|1997-08-28|COLLECT COD|MAIL|inal deposits use furiousl|
+3491|154|2|1|28|29516.20|0.04|0.03|N|O|1998-09-29|1998-09-08|1998-10-23|COLLECT COD|FOB|ccounts. sly|
+3491|122|3|2|22|22486.64|0.08|0.02|N|O|1998-08-19|1998-08-22|1998-09-03|TAKE BACK RETURN|REG AIR| grow against the boldly pending pinto bea|
+3492|156|7|1|3|3168.45|0.02|0.08|R|F|1994-11-26|1994-12-28|1994-12-19|COLLECT COD|REG AIR|the deposits. carefully |
+3492|126|9|2|7|7182.84|0.04|0.00|R|F|1995-03-10|1995-01-03|1995-03-16|COLLECT COD|FOB|thely regular dolphi|
+3492|109|10|3|34|34309.40|0.05|0.06|A|F|1994-12-07|1994-12-29|1994-12-24|COLLECT COD|AIR| unusual requests. ir|
+3492|147|6|4|30|31414.20|0.02|0.06|A|F|1995-01-29|1995-01-02|1995-02-13|DELIVER IN PERSON|MAIL| detect furiously permanent, unusual accou|
+3492|122|1|5|47|48039.64|0.09|0.07|R|F|1995-03-24|1994-12-28|1995-03-29|NONE|REG AIR|deposits. quickly express |
+3492|22|7|6|47|43334.94|0.04|0.07|R|F|1994-12-12|1995-01-18|1994-12-26|COLLECT COD|RAIL|ronic instructions u|
+3493|93|6|1|31|30785.79|0.06|0.07|R|F|1993-10-22|1993-10-12|1993-11-07|DELIVER IN PERSON|REG AIR|ructions. slyly regular accounts across the|
+3493|132|3|2|10|10321.30|0.02|0.06|R|F|1993-08-27|1993-10-07|1993-09-23|COLLECT COD|TRUCK|hall have to integ|
+3494|117|1|1|40|40684.40|0.05|0.04|R|F|1993-07-10|1993-06-01|1993-07-25|TAKE BACK RETURN|TRUCK|lites haggle furiously about the fin|
+3494|75|6|2|23|22426.61|0.10|0.01|A|F|1993-06-19|1993-06-04|1993-07-14|NONE|FOB|osits nag |
+3494|198|2|3|40|43927.60|0.02|0.08|A|F|1993-05-30|1993-07-02|1993-06-20|TAKE BACK RETURN|MAIL|uests cajole blithely|
+3494|77|8|4|30|29312.10|0.04|0.03|R|F|1993-07-01|1993-06-08|1993-07-15|TAKE BACK RETURN|TRUCK|ns are quickly regular, |
+3495|28|3|1|20|18560.40|0.10|0.03|N|O|1996-04-24|1996-05-18|1996-05-01|TAKE BACK RETURN|RAIL|posits are carefully; forges cajole qui|
+3495|173|1|2|24|25756.08|0.05|0.02|N|O|1996-03-22|1996-04-10|1996-04-07|DELIVER IN PERSON|RAIL|ic, final pains along the even request|
+3495|199|10|3|16|17587.04|0.08|0.02|N|O|1996-03-30|1996-04-02|1996-04-12|TAKE BACK RETURN|AIR|y bold dependencies; blithely idle sautern|
+3520|28|1|1|30|27840.60|0.04|0.02|N|O|1997-11-11|1997-10-02|1997-12-06|COLLECT COD|SHIP|deas should solve blithely among the ironi|
+3520|167|4|2|38|40552.08|0.00|0.04|N|O|1997-08-14|1997-10-26|1997-09-09|NONE|RAIL|yly final packages according to the quickl|
+3520|106|9|3|5|5030.50|0.01|0.02|N|O|1997-11-13|1997-09-22|1997-12-09|NONE|MAIL|ly even ideas haggle |
+3520|64|5|4|41|39526.46|0.01|0.01|N|O|1997-08-06|1997-09-20|1997-08-20|TAKE BACK RETURN|AIR| carefully pendi|
+3520|163|10|5|35|37210.60|0.02|0.02|N|O|1997-09-16|1997-09-03|1997-09-24|DELIVER IN PERSON|FOB|s nag carefully. sometimes unusual account|
+3521|59|4|1|48|46034.40|0.09|0.03|A|F|1993-01-03|1992-12-31|1993-01-22|NONE|AIR|ses use. furiously express ideas wake f|
+3521|131|2|2|2|2062.26|0.05|0.06|R|F|1993-01-29|1992-12-20|1993-02-23|NONE|MAIL|refully duri|
+3521|178|8|3|38|40970.46|0.00|0.08|A|F|1993-02-15|1992-12-10|1993-03-10|COLLECT COD|FOB|ges hang q|
+3521|144|7|4|26|27147.64|0.02|0.08|R|F|1993-01-04|1993-01-20|1993-01-17|DELIVER IN PERSON|AIR|onic dependencies haggle. fur|
+3521|36|7|5|28|26208.84|0.10|0.01|A|F|1993-01-06|1993-01-22|1993-02-02|TAKE BACK RETURN|FOB|e slyly above the slyly final|
+3522|4|9|1|6|5424.00|0.08|0.03|A|F|1995-01-21|1994-12-09|1995-01-23|NONE|SHIP|tes snooze |
+3522|87|8|2|48|47379.84|0.00|0.03|R|F|1994-12-05|1994-10-30|1994-12-26|TAKE BACK RETURN|SHIP|ve the quickly special packages|
+3522|157|2|3|46|48628.90|0.09|0.02|A|F|1994-11-12|1994-11-30|1994-11-20|NONE|AIR|d the express, silent foxes. blit|
+3522|130|9|4|7|7210.91|0.10|0.02|A|F|1994-10-31|1994-11-19|1994-11-28|NONE|TRUCK|e stealthil|
+3522|50|9|5|27|25651.35|0.02|0.05|R|F|1994-11-29|1994-12-15|1994-12-08|COLLECT COD|REG AIR|ic tithes. car|
+3522|158|10|6|18|19046.70|0.01|0.03|A|F|1994-11-16|1994-10-29|1994-11-29|COLLECT COD|RAIL|sits wake carefully pen|
+3523|25|6|1|15|13875.30|0.06|0.02|N|O|1998-06-26|1998-05-22|1998-07-24|COLLECT COD|REG AIR|se slyly pending, sp|
+3523|133|9|2|4|4132.52|0.03|0.06|N|O|1998-05-08|1998-05-18|1998-05-25|TAKE BACK RETURN|MAIL|ts. final accounts detect furiously along |
+3523|50|7|3|24|22801.20|0.07|0.04|N|O|1998-08-02|1998-06-22|1998-08-27|COLLECT COD|FOB|ke according to the doggedly re|
+3523|192|4|4|36|39318.84|0.06|0.08|N|O|1998-05-26|1998-06-04|1998-06-25|DELIVER IN PERSON|SHIP|accounts. fluffily regu|
+3523|134|5|5|48|49638.24|0.00|0.01|N|O|1998-07-22|1998-06-25|1998-08-19|DELIVER IN PERSON|AIR| regular requests|
+3524|137|8|1|5|5185.65|0.01|0.04|R|F|1992-05-23|1992-07-25|1992-06-19|DELIVER IN PERSON|RAIL|ts whithout the bold depende|
+3524|143|6|2|17|17733.38|0.09|0.08|A|F|1992-09-01|1992-07-17|1992-09-05|DELIVER IN PERSON|FOB|g, final epitaphs about the pinto |
+3525|46|7|1|12|11352.48|0.01|0.03|N|O|1996-03-08|1996-03-18|1996-03-16|NONE|TRUCK|lar excuses wake carefull|
+3525|138|9|2|27|28029.51|0.03|0.03|N|O|1995-12-30|1996-01-23|1996-01-02|DELIVER IN PERSON|SHIP|y slyly special asymptotes|
+3525|75|5|3|31|30227.17|0.00|0.03|N|O|1996-03-08|1996-02-27|1996-03-13|COLLECT COD|TRUCK|he careful|
+3525|184|5|4|28|30357.04|0.03|0.02|N|O|1996-01-22|1996-02-08|1996-01-27|COLLECT COD|FOB| nag according |
+3526|98|9|1|11|10978.99|0.02|0.03|R|F|1995-05-23|1995-05-28|1995-05-24|NONE|TRUCK|ges. furiously regular d|
+3526|117|7|2|23|23393.53|0.03|0.04|A|F|1995-05-01|1995-05-31|1995-05-25|DELIVER IN PERSON|FOB|special, regular packages cajole. |
+3526|33|9|3|20|18660.60|0.05|0.08|N|F|1995-06-16|1995-04-26|1995-06-22|DELIVER IN PERSON|REG AIR|kages. bold, special requests detect sl|
+3527|102|7|1|47|47098.70|0.07|0.02|N|O|1997-07-14|1997-07-29|1997-07-21|DELIVER IN PERSON|RAIL|unts. express re|
+3527|26|9|2|33|30558.66|0.01|0.02|N|O|1997-09-25|1997-09-17|1997-10-12|NONE|FOB|kly alongside of |
+3527|162|7|3|50|53108.00|0.09|0.07|N|O|1997-07-17|1997-08-03|1997-07-29|DELIVER IN PERSON|SHIP|e even accounts was about th|
+3527|128|3|4|17|17478.04|0.02|0.05|N|O|1997-07-30|1997-09-01|1997-08-17|COLLECT COD|MAIL|ular instruction|
+3552|197|8|1|18|19749.42|0.01|0.07|N|O|1997-08-11|1997-07-14|1997-08-15|DELIVER IN PERSON|TRUCK|s deposits against the blithely unusual pin|
+3552|90|1|2|44|43563.96|0.01|0.00|N|O|1997-08-08|1997-06-15|1997-08-29|COLLECT COD|FOB|ns after the blithely reg|
+3552|161|6|3|36|38201.76|0.04|0.08|N|O|1997-06-29|1997-06-24|1997-07-21|COLLECT COD|TRUCK|ly regular theodolites. fin|
+3553|143|10|1|4|4172.56|0.05|0.01|R|F|1994-06-13|1994-07-10|1994-07-03|COLLECT COD|RAIL|olites boost bli|
+3553|65|4|2|26|25091.56|0.05|0.08|A|F|1994-08-06|1994-07-30|1994-08-23|DELIVER IN PERSON|MAIL|fily special p|
+3553|22|5|3|18|16596.36|0.04|0.03|A|F|1994-07-03|1994-06-30|1994-07-07|COLLECT COD|RAIL|. quickly ironic|
+3553|32|8|4|40|37281.20|0.06|0.00|A|F|1994-09-14|1994-06-26|1994-09-25|NONE|RAIL| slyly pending asymptotes against the furi|
+3553|157|2|5|36|38057.40|0.06|0.08|R|F|1994-08-12|1994-06-25|1994-09-06|DELIVER IN PERSON|TRUCK| realms. pending, bold theodolites |
+3554|175|5|1|32|34405.44|0.01|0.05|N|O|1995-09-28|1995-09-01|1995-10-07|NONE|RAIL|. blithely ironic t|
+3554|145|6|2|18|18812.52|0.03|0.00|N|O|1995-09-11|1995-08-12|1995-10-04|DELIVER IN PERSON|REG AIR| haggle. furiously fluffy requests ac|
+3554|192|3|3|41|44779.79|0.02|0.01|N|O|1995-07-13|1995-08-28|1995-07-27|DELIVER IN PERSON|MAIL|ent dependencies. sly|
+3555|166|3|1|11|11727.76|0.05|0.02|N|O|1996-09-25|1996-10-01|1996-10-03|NONE|FOB|oost caref|
+3555|79|10|2|15|14686.05|0.03|0.08|N|O|1996-07-13|1996-09-01|1996-08-02|TAKE BACK RETURN|RAIL|y across the pending a|
+3555|43|2|3|25|23576.00|0.09|0.07|N|O|1996-10-01|1996-08-23|1996-10-24|TAKE BACK RETURN|MAIL|sual packages. quickly |
+3555|5|6|4|19|17195.00|0.00|0.05|N|O|1996-09-08|1996-09-14|1996-10-01|COLLECT COD|REG AIR|leep special theodolit|
+3555|33|4|5|29|27057.87|0.07|0.04|N|O|1996-08-02|1996-09-04|1996-08-08|DELIVER IN PERSON|TRUCK|deas. carefully s|
+3555|28|3|6|33|30624.66|0.04|0.08|N|O|1996-09-20|1996-09-23|1996-10-05|TAKE BACK RETURN|AIR|fluffily regular a|
+3555|126|5|7|9|9235.08|0.07|0.02|N|O|1996-10-13|1996-10-02|1996-10-22|NONE|SHIP|are. slyly final foxes acro|
+3556|142|9|1|45|46896.30|0.05|0.06|A|F|1992-10-14|1992-12-21|1992-10-16|NONE|TRUCK|ckages boost quickl|
+3556|31|2|2|43|40034.29|0.02|0.06|R|F|1993-01-18|1992-11-09|1993-02-04|NONE|FOB|wake carefull|
+3556|87|8|3|28|27638.24|0.10|0.04|A|F|1993-01-06|1992-11-27|1993-01-16|NONE|MAIL|refully final instructions? ironic packa|
+3557|175|3|1|41|44081.97|0.01|0.07|R|F|1993-01-30|1992-12-31|1993-02-18|COLLECT COD|FOB|ideas breach c|
+3557|129|10|2|37|38077.44|0.03|0.05|R|F|1993-02-16|1993-01-05|1993-03-15|DELIVER IN PERSON|RAIL|gside of the ca|
+3558|87|8|1|8|7896.64|0.01|0.03|N|O|1996-05-31|1996-05-26|1996-06-25|COLLECT COD|AIR|? even requests sle|
+3558|10|7|2|28|25480.28|0.02|0.08|N|O|1996-06-02|1996-04-18|1996-06-24|COLLECT COD|TRUCK|l deposits |
+3558|187|8|3|3|3261.54|0.03|0.06|N|O|1996-05-19|1996-04-28|1996-05-26|DELIVER IN PERSON|RAIL|l, final deposits haggle. fina|
+3558|91|5|4|22|21803.98|0.06|0.03|N|O|1996-04-27|1996-04-19|1996-04-30|DELIVER IN PERSON|SHIP|refully ironic theodolites are fu|
+3558|29|8|5|38|35302.76|0.03|0.08|N|O|1996-05-29|1996-05-02|1996-06-09|COLLECT COD|RAIL|refully permanently iron|
+3558|72|1|6|17|16525.19|0.07|0.07|N|O|1996-03-14|1996-05-04|1996-04-05|NONE|RAIL|ithely unusual packa|
+3559|90|1|1|29|28712.61|0.00|0.07|R|F|1992-12-10|1992-12-03|1992-12-20|COLLECT COD|REG AIR|l, regular accounts wake flu|
+3584|11|8|1|4|3644.04|0.04|0.08|N|O|1997-08-16|1997-10-31|1997-08-28|DELIVER IN PERSON|TRUCK|nal packag|
+3584|160|8|2|23|24383.68|0.00|0.03|N|O|1997-09-10|1997-10-15|1997-09-30|COLLECT COD|TRUCK|l platelets until the asymptotes |
+3584|24|5|3|6|5544.12|0.03|0.06|N|O|1997-10-28|1997-11-09|1997-11-24|TAKE BACK RETURN|MAIL|deposits across the|
+3584|146|5|4|11|11507.54|0.06|0.02|N|O|1997-11-27|1997-10-15|1997-12-08|NONE|REG AIR|lithely slyly |
+3584|18|5|5|39|35802.39|0.09|0.07|N|O|1997-09-20|1997-10-31|1997-10-06|COLLECT COD|AIR|eposits. carefu|
+3585|122|1|1|21|21464.52|0.05|0.04|A|F|1994-12-04|1994-12-25|1995-01-01|TAKE BACK RETURN|TRUCK|ounts use. express, final platelets us|
+3585|19|10|2|40|36760.40|0.03|0.00|R|F|1995-01-22|1995-01-17|1995-02-07|TAKE BACK RETURN|RAIL|elets affix. even asymptotes play care|
+3585|112|2|3|11|11133.21|0.01|0.04|R|F|1995-01-04|1995-02-14|1995-01-15|NONE|MAIL|even packages|
+3585|48|1|4|33|31285.32|0.08|0.08|A|F|1994-12-14|1995-01-19|1994-12-22|NONE|RAIL|ironic dependencies serve furi|
+3585|25|8|5|13|12025.26|0.06|0.07|R|F|1995-03-15|1995-01-22|1995-03-17|DELIVER IN PERSON|AIR|ccording to the foxes. slyly iro|
+3585|94|7|6|7|6958.63|0.10|0.02|A|F|1994-12-13|1995-01-20|1995-01-05|TAKE BACK RETURN|TRUCK|dependencies sleep un|
+3585|42|1|7|45|42391.80|0.03|0.00|A|F|1995-01-20|1995-02-19|1995-02-11|DELIVER IN PERSON|MAIL|are blithely c|
+3586|194|7|1|2|2188.38|0.03|0.08|R|F|1994-02-10|1994-01-07|1994-03-03|DELIVER IN PERSON|RAIL|he even, unusual decoy|
+3586|84|5|2|29|28538.32|0.04|0.07|R|F|1994-03-06|1994-03-02|1994-03-13|DELIVER IN PERSON|RAIL| slyly unusual i|
+3586|58|3|3|2|1916.10|0.03|0.06|R|F|1994-03-22|1994-02-20|1994-04-08|NONE|REG AIR|unts. slyly final ideas agai|
+3586|84|5|4|33|32474.64|0.06|0.01|R|F|1994-01-24|1994-02-09|1994-02-07|NONE|TRUCK|refully across the fur|
+3586|108|1|5|8|8064.80|0.06|0.02|A|F|1994-03-29|1994-02-26|1994-04-02|NONE|FOB|theodolites hagg|
+3586|99|1|6|8|7992.72|0.09|0.01|A|F|1994-03-18|1994-01-17|1994-04-06|DELIVER IN PERSON|RAIL| ironic pinto beans cajole carefully theo|
+3586|123|4|7|33|33762.96|0.05|0.04|A|F|1994-02-11|1994-01-15|1994-03-03|NONE|REG AIR|iously regular pinto beans integrate|
+3587|197|10|1|5|5485.95|0.09|0.07|N|O|1996-09-03|1996-07-05|1996-09-11|DELIVER IN PERSON|SHIP|ithely regular decoys above the |
+3587|132|8|2|48|49542.24|0.00|0.03|N|O|1996-08-02|1996-07-02|1996-08-05|TAKE BACK RETURN|MAIL|beans. blithely final depe|
+3587|151|3|3|36|37841.40|0.05|0.05|N|O|1996-07-26|1996-06-16|1996-08-23|TAKE BACK RETURN|MAIL|ully regular excuse|
+3587|124|9|4|31|31747.72|0.03|0.01|N|O|1996-07-21|1996-07-01|1996-07-23|COLLECT COD|SHIP|press fluffily regul|
+3587|70|7|5|12|11640.84|0.06|0.03|N|O|1996-08-30|1996-07-04|1996-09-22|DELIVER IN PERSON|RAIL|g the even pinto beans. special,|
+3587|107|2|6|16|16113.60|0.01|0.03|N|O|1996-05-11|1996-06-19|1996-06-04|COLLECT COD|FOB|y ruthless dolphins to |
+3587|74|2|7|23|22403.61|0.07|0.05|N|O|1996-08-30|1996-07-01|1996-09-10|COLLECT COD|FOB|l multipliers sleep theodolites-- slyly |
+3588|91|5|1|28|27750.52|0.04|0.08|R|F|1995-05-03|1995-05-03|1995-05-14|DELIVER IN PERSON|TRUCK|special pinto beans cajole slyly. slyly |
+3588|88|9|2|6|5928.48|0.06|0.08|A|F|1995-04-09|1995-05-30|1995-04-10|TAKE BACK RETURN|MAIL|s. fluffily fluf|
+3588|159|10|3|45|47661.75|0.04|0.02|R|F|1995-05-07|1995-05-04|1995-05-28|TAKE BACK RETURN|TRUCK|ecial pains integrate blithely. reques|
+3588|127|10|4|22|22596.64|0.05|0.00|A|F|1995-04-08|1995-05-06|1995-04-27|NONE|RAIL|inal accounts. pending, bo|
+3588|55|3|5|28|26741.40|0.03|0.03|A|F|1995-04-23|1995-05-25|1995-04-28|DELIVER IN PERSON|TRUCK| express sheaves. unusual theodo|
+3588|110|3|6|37|37374.07|0.08|0.04|N|F|1995-06-17|1995-05-25|1995-06-24|TAKE BACK RETURN|RAIL|xcuses sleep quickly along th|
+3588|39|5|7|46|43195.38|0.08|0.07|A|F|1995-06-06|1995-05-08|1995-06-08|NONE|AIR| slyly ironic deposits sublate ab|
+3589|37|3|1|42|39355.26|0.08|0.08|R|F|1994-08-11|1994-07-17|1994-08-23|DELIVER IN PERSON|AIR|he blithely unusual pac|
+3590|176|6|1|10|10761.70|0.08|0.00|N|O|1995-07-17|1995-06-26|1995-08-12|TAKE BACK RETURN|SHIP|t the quickly ironic|
+3590|95|6|2|19|18906.71|0.03|0.03|N|O|1995-08-02|1995-06-20|1995-08-08|NONE|SHIP|special pinto beans. blithely reg|
+3590|96|9|3|43|42831.87|0.07|0.06|N|O|1995-07-12|1995-07-25|1995-07-16|DELIVER IN PERSON|SHIP|s could have to use|
+3590|56|8|4|26|24857.30|0.01|0.03|N|O|1995-07-08|1995-06-17|1995-08-02|DELIVER IN PERSON|SHIP|arefully along th|
+3590|191|2|5|37|40374.03|0.00|0.08|N|O|1995-09-01|1995-06-29|1995-09-10|NONE|SHIP|ccounts above the silent waters thrash f|
+3590|119|10|6|31|31592.41|0.03|0.01|N|O|1995-06-24|1995-07-12|1995-06-25|DELIVER IN PERSON|REG AIR|ve furiously final instructions. slyly regu|
+3590|194|7|7|44|48144.36|0.05|0.04|N|F|1995-06-07|1995-06-15|1995-06-27|NONE|MAIL|s sleep after the regular platelets. blit|
+3591|29|8|1|21|19509.42|0.03|0.03|A|F|1994-02-25|1994-02-02|1994-03-05|DELIVER IN PERSON|TRUCK|structions against |
+3591|69|6|2|24|23257.44|0.04|0.04|R|F|1993-12-26|1994-01-07|1994-01-25|COLLECT COD|FOB|ages. slyly regular dependencies cajo|
+3591|164|9|3|4|4256.64|0.01|0.03|A|F|1994-04-04|1994-02-19|1994-05-02|DELIVER IN PERSON|RAIL|he final packages. deposits serve quick|
+3591|153|4|4|49|51604.35|0.01|0.00|A|F|1994-03-21|1994-01-26|1994-03-28|COLLECT COD|AIR| mold slyly. bl|
+3616|197|9|1|30|32915.70|0.01|0.00|A|F|1994-05-05|1994-04-24|1994-05-12|TAKE BACK RETURN|FOB|ly ironic accounts unwind b|
+3616|138|9|2|28|29067.64|0.08|0.06|R|F|1994-02-20|1994-04-18|1994-03-05|DELIVER IN PERSON|REG AIR|ironic packages. furiously ev|
+3617|117|8|1|46|46787.06|0.03|0.02|N|O|1996-05-19|1996-05-14|1996-06-11|NONE|RAIL|ar theodolites. regu|
+3617|98|9|2|16|15969.44|0.05|0.02|N|O|1996-05-08|1996-06-03|1996-05-19|COLLECT COD|RAIL| slyly on th|
+3617|98|2|3|32|31938.88|0.00|0.06|N|O|1996-04-20|1996-06-07|1996-05-19|DELIVER IN PERSON|MAIL|uriously against the express accounts. ex|
+3617|41|10|4|22|20702.88|0.10|0.05|N|O|1996-07-11|1996-05-02|1996-07-25|NONE|REG AIR|uffily even accounts. packages sleep blithe|
+3617|137|8|5|11|11408.43|0.08|0.05|N|O|1996-07-16|1996-04-23|1996-07-28|COLLECT COD|MAIL|ly quickly even requests. final|
+3618|140|1|1|38|39525.32|0.08|0.00|N|O|1997-12-22|1998-02-23|1998-01-03|TAKE BACK RETURN|TRUCK|nts haggle fluffily above the regular |
+3618|144|5|2|48|50118.72|0.04|0.00|N|O|1998-03-12|1998-02-13|1998-03-29|DELIVER IN PERSON|TRUCK|tructions atop the ironi|
+3618|63|2|3|24|23113.44|0.01|0.04|N|O|1998-01-26|1998-01-15|1998-02-17|TAKE BACK RETURN|AIR|xpress acc|
+3618|161|2|4|26|27590.16|0.01|0.05|N|O|1998-03-23|1998-01-24|1998-04-15|DELIVER IN PERSON|AIR|iously regular deposits cajole ruthless|
+3619|96|7|1|49|48808.41|0.01|0.08|N|O|1997-01-22|1996-12-21|1997-02-17|TAKE BACK RETURN|MAIL| waters. furiously even deposits |
+3619|116|10|2|27|27434.97|0.08|0.04|N|O|1996-12-12|1997-01-18|1996-12-18|TAKE BACK RETURN|SHIP|pecial accounts haggle care|
+3619|48|7|3|46|43609.84|0.08|0.03|N|O|1997-01-31|1997-01-27|1997-02-11|NONE|SHIP|press, expres|
+3619|93|6|4|18|17875.62|0.04|0.02|N|O|1997-03-18|1996-12-24|1997-03-21|COLLECT COD|AIR|eodolites |
+3619|120|10|5|38|38764.56|0.05|0.08|N|O|1996-12-08|1997-02-03|1997-01-07|NONE|RAIL|theodolites detect abo|
+3619|152|3|6|43|45242.45|0.01|0.01|N|O|1997-01-25|1997-01-06|1997-02-07|COLLECT COD|RAIL| bold, even|
+3620|59|7|1|41|39321.05|0.03|0.08|N|O|1997-03-21|1997-04-20|1997-03-30|COLLECT COD|FOB|t attainments cajole qui|
+3620|167|4|2|16|17074.56|0.00|0.06|N|O|1997-05-17|1997-05-08|1997-06-03|COLLECT COD|SHIP|s. even, pending in|
+3621|17|8|1|29|26593.29|0.02|0.06|A|F|1993-08-03|1993-07-08|1993-08-10|DELIVER IN PERSON|FOB|al requests. fl|
+3621|93|5|2|13|12910.17|0.09|0.04|R|F|1993-08-30|1993-06-30|1993-09-01|NONE|REG AIR|r the unusual packages. brave theodoli|
+3621|164|9|3|45|47887.20|0.07|0.07|R|F|1993-08-09|1993-06-18|1993-09-05|DELIVER IN PERSON|AIR| doubt about the bold deposits. carefully|
+3621|44|3|4|20|18880.80|0.05|0.04|R|F|1993-05-27|1993-07-04|1993-06-22|TAKE BACK RETURN|SHIP|gular accounts use carefully with|
+3622|175|6|1|47|50532.99|0.09|0.00|N|O|1996-02-24|1996-02-22|1996-03-12|TAKE BACK RETURN|TRUCK|are careful|
+3622|89|10|2|4|3956.32|0.04|0.04|N|O|1996-02-03|1996-02-19|1996-02-16|TAKE BACK RETURN|TRUCK|lithely brave foxes. furi|
+3622|190|1|3|46|50148.74|0.07|0.07|N|O|1995-12-18|1996-01-23|1996-01-12|TAKE BACK RETURN|AIR|sits wake. blithe|
+3622|177|8|4|9|9694.53|0.08|0.05|N|O|1995-12-12|1996-02-09|1995-12-13|TAKE BACK RETURN|SHIP|arefully. furiously regular ideas n|
+3623|80|10|1|32|31362.56|0.05|0.00|N|O|1997-04-18|1997-03-15|1997-05-09|COLLECT COD|SHIP| courts. furiously regular ideas b|
+3623|117|4|2|33|33564.63|0.08|0.01|N|O|1997-03-17|1997-02-13|1997-04-02|TAKE BACK RETURN|TRUCK|odolites. blithely spe|
+3623|24|7|3|21|19404.42|0.02|0.02|N|O|1997-01-19|1997-03-18|1997-01-24|NONE|FOB|ress ideas are furio|
+3623|165|2|4|42|44736.72|0.05|0.06|N|O|1997-01-11|1997-03-24|1997-01-21|COLLECT COD|RAIL|g to the slyly regular packa|
+3623|88|9|5|30|29642.40|0.10|0.04|N|O|1997-04-04|1997-03-03|1997-05-01|NONE|RAIL| ironic somas sleep fluffily|
+3623|186|7|6|7|7603.26|0.01|0.02|N|O|1997-01-05|1997-03-26|1997-01-26|NONE|TRUCK|aves. slyly special packages cajole. fu|
+3623|140|6|7|13|13521.82|0.03|0.08|N|O|1997-01-02|1997-02-26|1997-01-26|DELIVER IN PERSON|SHIP|deas. furiously expres|
+3648|144|5|1|16|16706.24|0.02|0.06|A|F|1993-08-14|1993-08-14|1993-08-15|COLLECT COD|FOB|s nag packages.|
+3648|105|2|2|30|30153.00|0.00|0.01|R|F|1993-08-31|1993-09-06|1993-09-06|DELIVER IN PERSON|FOB| above the somas boost furious|
+3648|46|7|3|34|32165.36|0.10|0.00|A|F|1993-08-21|1993-07-25|1993-09-15|DELIVER IN PERSON|FOB| deposits are furiously. careful, |
+3648|13|10|4|16|14608.16|0.06|0.03|R|F|1993-07-27|1993-08-26|1993-08-24|DELIVER IN PERSON|FOB|uriously stealthy deposits haggle furi|
+3648|117|7|5|25|25427.75|0.06|0.03|R|F|1993-08-15|1993-08-25|1993-09-09|TAKE BACK RETURN|TRUCK|s requests. silent asymp|
+3648|169|10|6|14|14968.24|0.08|0.06|R|F|1993-10-02|1993-08-26|1993-10-09|COLLECT COD|AIR|sly pending excuses. carefully i|
+3648|195|6|7|49|53664.31|0.09|0.03|R|F|1993-06-27|1993-07-27|1993-07-24|TAKE BACK RETURN|FOB|egular instructions. slyly regular pinto|
+3649|5|6|1|25|22625.00|0.10|0.04|A|F|1994-10-27|1994-08-23|1994-11-05|TAKE BACK RETURN|TRUCK|special re|
+3649|89|10|2|23|22748.84|0.08|0.00|R|F|1994-09-26|1994-10-01|1994-09-28|NONE|REG AIR|rs promise blithe|
+3649|70|7|3|14|13580.98|0.02|0.04|A|F|1994-09-19|1994-08-17|1994-10-12|DELIVER IN PERSON|TRUCK|ithely bold accounts wake |
+3649|76|4|4|40|39042.80|0.00|0.08|R|F|1994-07-20|1994-08-30|1994-08-14|TAKE BACK RETURN|RAIL|luffy somas sleep quickly-- ironic de|
+3649|100|1|5|24|24002.40|0.05|0.03|A|F|1994-07-07|1994-08-20|1994-07-27|TAKE BACK RETURN|FOB|c accounts. quickly final theodo|
+3649|122|3|6|3|3066.36|0.10|0.04|A|F|1994-07-17|1994-08-10|1994-08-03|NONE|FOB|lly bold requests nag; |
+3650|136|2|1|30|31083.90|0.10|0.00|A|F|1992-08-26|1992-07-05|1992-09-01|DELIVER IN PERSON|SHIP|ckly special platelets. furiously sil|
+3650|128|9|2|43|44209.16|0.05|0.05|A|F|1992-09-07|1992-08-12|1992-09-10|COLLECT COD|TRUCK|gside of the quick|
+3650|2|9|3|1|902.00|0.04|0.06|A|F|1992-06-23|1992-07-18|1992-07-08|NONE|REG AIR|re about the pinto |
+3650|63|2|4|31|29854.86|0.10|0.08|R|F|1992-06-15|1992-07-01|1992-07-15|DELIVER IN PERSON|RAIL| against the ironic accounts cajol|
+3650|187|8|5|19|20656.42|0.05|0.04|R|F|1992-08-29|1992-08-09|1992-09-21|DELIVER IN PERSON|AIR|y even forges. fluffily furious accounts|
+3650|94|8|6|27|26840.43|0.07|0.08|A|F|1992-07-03|1992-07-23|1992-07-13|COLLECT COD|MAIL|ular requests snooze fluffily regular pi|
+3650|70|7|7|43|41713.01|0.10|0.07|A|F|1992-06-25|1992-07-09|1992-07-22|DELIVER IN PERSON|RAIL|structions use caref|
+3651|19|9|1|20|18380.20|0.01|0.04|N|O|1998-06-10|1998-06-06|1998-06-23|NONE|SHIP|tect quickly among the r|
+3651|155|7|2|24|25323.60|0.09|0.04|N|O|1998-06-22|1998-07-17|1998-07-10|DELIVER IN PERSON|RAIL|excuses haggle according to th|
+3651|113|10|3|41|41537.51|0.00|0.05|N|O|1998-05-10|1998-07-09|1998-05-13|NONE|RAIL|blithely. furiously |
+3651|110|5|4|27|27272.97|0.05|0.03|N|O|1998-05-03|1998-06-30|1998-05-05|DELIVER IN PERSON|RAIL| sleep blithely furiously do|
+3652|180|8|1|24|25924.32|0.05|0.03|N|O|1997-06-07|1997-04-07|1997-06-12|COLLECT COD|MAIL|the final p|
+3652|137|8|2|37|38373.81|0.02|0.05|N|O|1997-05-11|1997-04-06|1997-06-05|COLLECT COD|MAIL|osits haggle carefu|
+3652|163|8|3|39|41463.24|0.01|0.02|N|O|1997-03-10|1997-04-03|1997-03-21|NONE|REG AIR|y express instructions. un|
+3652|80|9|4|1|980.08|0.01|0.04|N|O|1997-04-20|1997-05-03|1997-05-18|DELIVER IN PERSON|SHIP| bold dependencies sublate. r|
+3653|145|4|1|38|39715.32|0.08|0.05|A|F|1994-06-26|1994-05-13|1994-07-13|NONE|REG AIR|ainst the |
+3653|64|1|2|29|27957.74|0.07|0.01|A|F|1994-04-11|1994-06-11|1994-04-29|COLLECT COD|RAIL|ording to the special, final|
+3653|181|2|3|17|18380.06|0.09|0.03|R|F|1994-06-24|1994-06-02|1994-07-17|DELIVER IN PERSON|RAIL|gle slyly regular|
+3653|186|7|4|9|9775.62|0.10|0.07|R|F|1994-04-03|1994-05-19|1994-04-10|COLLECT COD|FOB|slyly silent account|
+3653|188|9|5|41|44615.38|0.08|0.01|A|F|1994-06-18|1994-05-18|1994-06-20|COLLECT COD|RAIL|onic packages affix sly|
+3653|43|4|6|9|8487.36|0.05|0.03|A|F|1994-07-21|1994-05-31|1994-08-17|NONE|MAIL|tes: blithely bo|
+3653|49|6|7|2|1898.08|0.06|0.03|R|F|1994-06-02|1994-05-31|1994-06-29|NONE|FOB|n accounts. fina|
+3654|165|2|1|46|48997.36|0.08|0.05|A|F|1992-06-05|1992-08-19|1992-06-06|DELIVER IN PERSON|FOB|usly regular foxes. furio|
+3654|93|4|2|29|28799.61|0.07|0.06|A|F|1992-09-11|1992-07-20|1992-10-04|DELIVER IN PERSON|FOB|odolites detect. quickly r|
+3654|2|7|3|37|33374.00|0.07|0.05|A|F|1992-09-22|1992-07-20|1992-10-19|TAKE BACK RETURN|RAIL|unts doze bravely ab|
+3654|168|9|4|11|11749.76|0.08|0.00|A|F|1992-07-20|1992-07-30|1992-07-23|TAKE BACK RETURN|SHIP|quickly along the express, ironic req|
+3654|94|5|5|34|33799.06|0.04|0.00|R|F|1992-07-26|1992-08-26|1992-08-12|TAKE BACK RETURN|REG AIR| the quick|
+3654|107|4|6|20|20142.00|0.03|0.02|A|F|1992-07-30|1992-07-05|1992-08-05|COLLECT COD|SHIP|s sleep about the slyly |
+3654|173|1|7|45|48292.65|0.01|0.07|A|F|1992-09-15|1992-07-04|1992-09-20|DELIVER IN PERSON|FOB|sly ironic notornis nag slyly|
+3655|184|5|1|5|5420.90|0.03|0.04|R|F|1993-01-17|1992-12-31|1993-01-23|DELIVER IN PERSON|TRUCK|riously bold pinto be|
+3655|97|10|2|1|997.09|0.10|0.06|R|F|1992-10-24|1992-12-18|1992-11-07|DELIVER IN PERSON|AIR|arefully slow pinto beans are|
+3655|30|5|3|35|32551.05|0.01|0.04|R|F|1992-12-20|1992-11-16|1993-01-15|TAKE BACK RETURN|MAIL|blithely even accounts! furiously regular|
+3655|72|3|4|35|34022.45|0.04|0.07|R|F|1992-10-17|1992-12-23|1992-10-28|COLLECT COD|MAIL|ng foxes cajole fluffily slyly final fo|
+3680|177|6|1|48|51704.16|0.00|0.06|R|F|1993-01-16|1993-01-23|1993-01-19|COLLECT COD|FOB|packages. quickly fluff|
+3680|5|8|2|41|37105.00|0.00|0.04|A|F|1993-01-06|1993-03-02|1993-01-08|NONE|FOB|iously ironic platelets in|
+3680|56|4|3|33|31549.65|0.09|0.08|R|F|1993-03-16|1993-02-19|1993-04-05|NONE|FOB|ts. ironic, fina|
+3681|106|9|1|35|35213.50|0.03|0.08|R|F|1992-07-31|1992-05-18|1992-08-07|COLLECT COD|FOB|lyly special pinto |
+3682|61|10|1|6|5766.36|0.07|0.02|N|O|1997-05-06|1997-04-04|1997-05-11|NONE|AIR|ronic deposits wake slyly. ca|
+3682|116|7|2|18|18289.98|0.06|0.06|N|O|1997-04-30|1997-03-21|1997-05-10|NONE|FOB|regular dependencies|
+3682|47|10|3|17|16099.68|0.03|0.05|N|O|1997-02-12|1997-04-04|1997-02-22|COLLECT COD|FOB|, ironic packages wake a|
+3682|57|5|4|30|28711.50|0.09|0.05|N|O|1997-04-16|1997-04-16|1997-04-29|NONE|MAIL|he requests cajole quickly pending package|
+3683|101|4|1|35|35038.50|0.05|0.03|A|F|1993-05-31|1993-04-17|1993-06-14|NONE|SHIP| the furiously expr|
+3683|49|8|2|41|38910.64|0.01|0.06|A|F|1993-03-26|1993-05-06|1993-04-09|NONE|TRUCK|ress instructions. slyly express a|
+3683|100|3|3|23|23002.30|0.00|0.08|R|F|1993-07-02|1993-05-16|1993-07-30|NONE|TRUCK|xpress accounts sleep slyly re|
+3684|126|7|1|48|49253.76|0.04|0.06|A|F|1993-08-20|1993-09-02|1993-09-10|DELIVER IN PERSON|REG AIR|its boost alongside|
+3684|46|7|2|6|5676.24|0.06|0.08|R|F|1993-08-09|1993-10-05|1993-09-06|DELIVER IN PERSON|FOB|he silent requests. packages sleep fu|
+3684|163|8|3|19|20200.04|0.04|0.02|A|F|1993-10-19|1993-08-25|1993-11-02|COLLECT COD|FOB|e slyly carefully pending foxes. d|
+3684|135|1|4|13|13456.69|0.02|0.05|A|F|1993-07-23|1993-09-16|1993-08-06|NONE|TRUCK|ing, unusual pinto beans! thinly p|
+3685|47|4|1|37|35040.48|0.02|0.03|R|F|1992-03-11|1992-04-09|1992-04-05|DELIVER IN PERSON|TRUCK|ress attai|
+3685|58|6|2|7|6706.35|0.05|0.00|R|F|1992-05-16|1992-02-23|1992-05-17|DELIVER IN PERSON|FOB|sits. special asymptotes about the r|
+3685|134|5|3|38|39296.94|0.08|0.03|A|F|1992-05-17|1992-03-16|1992-06-06|TAKE BACK RETURN|TRUCK|thely unusual pack|
+3685|192|5|4|39|42595.41|0.10|0.05|R|F|1992-02-19|1992-04-06|1992-03-02|COLLECT COD|FOB|ic courts nag carefully after the |
+3685|56|7|5|37|35373.85|0.00|0.01|A|F|1992-03-02|1992-04-10|1992-03-04|NONE|FOB|. carefully sly requests are regular, regu|
+3686|122|5|1|7|7154.84|0.02|0.04|N|O|1998-07-15|1998-08-22|1998-07-30|DELIVER IN PERSON|TRUCK| furiously unusual accou|
+3686|200|2|2|38|41807.60|0.06|0.03|N|O|1998-09-04|1998-08-11|1998-09-19|DELIVER IN PERSON|AIR|y silent foxes! carefully ruthless cour|
+3686|45|6|3|31|29296.24|0.10|0.06|N|O|1998-09-09|1998-08-28|1998-10-09|COLLECT COD|MAIL|gle across the courts. furiously regu|
+3686|117|1|4|7|7119.77|0.10|0.01|N|O|1998-07-16|1998-09-02|1998-07-22|NONE|FOB|ake carefully carefully q|
+3687|145|4|1|32|33444.48|0.03|0.06|R|F|1993-05-07|1993-04-05|1993-05-25|DELIVER IN PERSON|AIR|deas cajole fo|
+3687|81|2|2|2|1962.16|0.00|0.08|R|F|1993-02-23|1993-03-25|1993-03-11|NONE|TRUCK| express requests. slyly regular depend|
+3687|174|4|3|10|10741.70|0.01|0.02|A|F|1993-02-11|1993-03-22|1993-03-09|NONE|FOB|ing pinto beans|
+3687|162|9|4|19|20181.04|0.02|0.05|A|F|1993-05-14|1993-04-24|1993-06-01|DELIVER IN PERSON|MAIL|ly final asymptotes according to t|
+3687|119|9|5|31|31592.41|0.07|0.08|A|F|1993-05-28|1993-03-20|1993-06-05|DELIVER IN PERSON|FOB|foxes cajole quickly about the furiously f|
+3712|141|4|1|27|28110.78|0.01|0.05|R|F|1992-02-01|1992-02-26|1992-03-02|TAKE BACK RETURN|SHIP|ctions. even accounts haggle alongside |
+3712|185|6|2|13|14107.34|0.03|0.03|R|F|1992-04-30|1992-02-11|1992-05-30|DELIVER IN PERSON|FOB|s around the furiously ironic account|
+3712|64|1|3|44|42418.64|0.01|0.01|A|F|1992-03-26|1992-02-19|1992-04-18|TAKE BACK RETURN|FOB|ously permanently regular req|
+3712|148|7|4|38|39829.32|0.01|0.06|A|F|1992-01-15|1992-03-24|1992-01-27|COLLECT COD|RAIL|s nag carefully-- even, reg|
+3713|112|6|1|41|41496.51|0.07|0.08|N|O|1998-05-11|1998-07-17|1998-05-22|COLLECT COD|RAIL|eposits wake blithely fina|
+3713|177|7|2|19|20466.23|0.04|0.04|N|O|1998-06-25|1998-07-24|1998-07-08|DELIVER IN PERSON|AIR|tructions serve blithely around the furi|
+3713|180|1|3|19|20523.42|0.03|0.02|N|O|1998-05-19|1998-07-06|1998-06-09|DELIVER IN PERSON|REG AIR|quests cajole careful|
+3713|169|10|4|45|48112.20|0.06|0.04|N|O|1998-06-15|1998-07-30|1998-07-14|DELIVER IN PERSON|MAIL|al pinto beans affix after the slyly |
+3713|90|1|5|46|45544.14|0.10|0.04|N|O|1998-08-22|1998-06-27|1998-08-31|NONE|MAIL|totes. carefully special theodolites s|
+3713|182|3|6|29|31383.22|0.09|0.03|N|O|1998-08-04|1998-06-13|1998-08-21|NONE|RAIL|the regular dugouts wake furiously sil|
+3713|130|1|7|14|14421.82|0.04|0.00|N|O|1998-07-19|1998-07-02|1998-07-28|DELIVER IN PERSON|SHIP|eposits impress according|
+3714|69|6|1|13|12597.78|0.07|0.03|N|O|1998-06-26|1998-06-17|1998-07-07|TAKE BACK RETURN|REG AIR| the furiously final|
+3714|146|3|2|14|14645.96|0.02|0.05|N|O|1998-05-30|1998-06-30|1998-05-31|DELIVER IN PERSON|RAIL|ending ideas. thinly unusual theodo|
+3714|159|10|3|16|16946.40|0.00|0.02|N|O|1998-05-25|1998-07-07|1998-06-17|TAKE BACK RETURN|AIR|ccounts cajole fu|
+3714|30|9|4|44|40921.32|0.04|0.02|N|O|1998-07-18|1998-07-10|1998-07-22|DELIVER IN PERSON|AIR|s. quickly ironic dugouts sublat|
+3715|97|1|1|13|12962.17|0.00|0.03|N|O|1996-05-11|1996-04-25|1996-06-09|TAKE BACK RETURN|SHIP|e quickly ironic|
+3715|169|6|2|16|17106.56|0.01|0.06|N|O|1996-06-28|1996-04-22|1996-06-30|TAKE BACK RETURN|AIR|usly regular pearls haggle final packages|
+3715|12|3|3|37|33744.37|0.05|0.02|N|O|1996-05-03|1996-04-30|1996-05-17|NONE|SHIP|ut the carefully expr|
+3716|32|8|1|10|9320.30|0.09|0.04|N|O|1997-12-02|1997-11-09|1997-12-14|TAKE BACK RETURN|SHIP|ts. quickly sly ideas slee|
+3716|194|5|2|39|42673.41|0.02|0.08|N|O|1997-11-27|1997-10-23|1997-12-24|COLLECT COD|REG AIR|even deposits.|
+3716|107|8|3|42|42298.20|0.02|0.08|N|O|1997-12-03|1997-10-12|1997-12-15|NONE|TRUCK| of the pend|
+3716|165|10|4|19|20238.04|0.05|0.08|N|O|1997-09-25|1997-10-18|1997-10-12|NONE|TRUCK|arefully unusual accounts. flu|
+3716|182|3|5|25|27054.50|0.06|0.05|N|O|1997-11-23|1997-10-24|1997-11-24|COLLECT COD|REG AIR|fully unusual accounts. carefu|
+3717|153|8|1|45|47391.75|0.07|0.04|N|O|1998-08-09|1998-08-18|1998-08-14|TAKE BACK RETURN|TRUCK|ests wake whithout the blithely final pl|
+3717|53|5|2|3|2859.15|0.01|0.07|N|O|1998-06-09|1998-07-31|1998-06-14|NONE|REG AIR|nside the regular packages sleep|
+3717|196|7|3|45|49328.55|0.05|0.08|N|O|1998-09-19|1998-07-22|1998-09-28|DELIVER IN PERSON|MAIL|s the blithely unu|
+3717|69|6|4|5|4845.30|0.06|0.03|N|O|1998-09-02|1998-08-20|1998-09-26|TAKE BACK RETURN|AIR|quickly among |
+3717|16|7|5|7|6412.07|0.09|0.02|N|O|1998-09-08|1998-07-18|1998-09-10|DELIVER IN PERSON|RAIL| after the packa|
+3717|64|1|6|38|36634.28|0.01|0.07|N|O|1998-07-10|1998-07-08|1998-07-29|COLLECT COD|RAIL|ly about the car|
+3717|106|7|7|28|28170.80|0.03|0.01|N|O|1998-07-25|1998-08-12|1998-08-16|COLLECT COD|RAIL|ts sleep q|
+3718|21|10|1|40|36840.80|0.01|0.04|N|O|1996-11-20|1996-12-17|1996-12-03|DELIVER IN PERSON|MAIL|out the express deposits|
+3718|163|8|2|16|17010.56|0.02|0.06|N|O|1996-11-11|1996-12-25|1996-11-12|COLLECT COD|TRUCK|slyly even accounts. blithely special acco|
+3718|70|5|3|8|7760.56|0.05|0.03|N|O|1996-12-06|1996-12-06|1996-12-15|TAKE BACK RETURN|AIR| the even deposits sleep carefully b|
+3719|22|5|1|35|32270.70|0.06|0.08|N|O|1997-06-11|1997-04-03|1997-06-15|TAKE BACK RETURN|TRUCK|ly foxes. pending braids haggle furio|
+3719|174|4|2|2|2148.34|0.02|0.08|N|O|1997-02-17|1997-04-25|1997-03-03|NONE|REG AIR|ccounts boost carefu|
+3719|182|3|3|12|12986.16|0.05|0.06|N|O|1997-06-10|1997-05-04|1997-07-09|TAKE BACK RETURN|REG AIR|grate according to the |
+3719|90|1|4|13|12871.17|0.02|0.00|N|O|1997-05-03|1997-04-16|1997-05-27|TAKE BACK RETURN|SHIP|iously. regular dep|
+3719|78|8|5|19|18583.33|0.06|0.08|N|O|1997-05-22|1997-03-20|1997-06-12|COLLECT COD|TRUCK|he regular ideas integrate acros|
+3719|142|5|6|43|44812.02|0.03|0.08|N|O|1997-05-08|1997-04-15|1997-06-06|COLLECT COD|RAIL|the furiously special pinto bean|
+3719|19|10|7|16|14704.16|0.10|0.01|N|O|1997-03-02|1997-03-18|1997-03-28|TAKE BACK RETURN|RAIL| express asymptotes. ir|
+3744|195|8|1|30|32855.70|0.05|0.06|A|F|1992-05-07|1992-02-12|1992-05-17|TAKE BACK RETURN|FOB|nts among |
+3745|137|8|1|18|18668.34|0.01|0.05|A|F|1993-10-17|1993-11-16|1993-11-13|DELIVER IN PERSON|SHIP| slyly bold pinto beans according to |
+3746|165|6|1|37|39410.92|0.07|0.00|A|F|1994-12-29|1994-10-25|1995-01-03|COLLECT COD|FOB|e of the careful|
+3746|144|7|2|28|29235.92|0.06|0.08|R|F|1994-09-20|1994-10-21|1994-09-27|DELIVER IN PERSON|FOB|s after the even, special requests|
+3746|188|9|3|3|3264.54|0.10|0.01|R|F|1994-11-03|1994-12-10|1994-11-12|NONE|MAIL| the silent ideas cajole carefully |
+3746|28|7|4|11|10208.22|0.00|0.05|R|F|1994-10-02|1994-11-19|1994-10-10|COLLECT COD|SHIP| ironic theodolites are among th|
+3747|141|10|1|42|43727.88|0.05|0.05|N|O|1996-11-10|1996-10-19|1996-11-19|TAKE BACK RETURN|REG AIR|y. blithely fina|
+3747|170|1|2|33|35315.61|0.01|0.03|N|O|1996-10-14|1996-11-12|1996-11-11|NONE|REG AIR| regular p|
+3747|139|10|3|30|31173.90|0.00|0.07|N|O|1996-12-16|1996-11-15|1996-12-17|NONE|RAIL|! furiously f|
+3747|33|9|4|21|19593.63|0.00|0.06|N|O|1996-11-18|1996-09-23|1996-11-26|TAKE BACK RETURN|AIR|ithely bold orbits mold furiously blit|
+3747|126|5|5|32|32835.84|0.08|0.05|N|O|1996-09-10|1996-11-04|1996-10-10|DELIVER IN PERSON|MAIL|quests shall h|
+3747|154|5|6|14|14758.10|0.08|0.07|N|O|1996-11-03|1996-10-29|1996-11-06|TAKE BACK RETURN|AIR|packages cajole carefu|
+3747|118|2|7|23|23416.53|0.00|0.04|N|O|1996-11-08|1996-11-10|1996-12-03|NONE|REG AIR|kages are ironic|
+3748|104|7|1|12|12049.20|0.06|0.01|N|O|1998-04-17|1998-04-15|1998-05-12|NONE|AIR|old reques|
+3748|165|4|2|24|25563.84|0.08|0.04|N|O|1998-06-07|1998-05-02|1998-06-21|DELIVER IN PERSON|TRUCK|al deposits. blithely|
+3748|197|1|3|19|20846.61|0.05|0.01|N|O|1998-04-23|1998-05-17|1998-05-23|COLLECT COD|RAIL|pinto beans run carefully quic|
+3748|187|8|4|5|5435.90|0.00|0.07|N|O|1998-06-29|1998-05-06|1998-07-12|DELIVER IN PERSON|MAIL| regular accounts sleep quickly-- furious|
+3748|147|4|5|21|21989.94|0.07|0.08|N|O|1998-03-30|1998-04-07|1998-04-05|TAKE BACK RETURN|MAIL|fix carefully furiously express ideas. furi|
+3749|173|3|1|11|11804.87|0.07|0.05|N|O|1995-06-25|1995-05-23|1995-07-10|TAKE BACK RETURN|RAIL|egular requests along the |
+3749|129|8|2|9|9262.08|0.08|0.05|A|F|1995-04-23|1995-04-18|1995-04-26|NONE|REG AIR|uses cajole blithely pla|
+3749|199|2|3|31|34074.89|0.00|0.05|N|F|1995-06-11|1995-05-20|1995-06-27|COLLECT COD|REG AIR|s. foxes sleep slyly unusual grouc|
+3749|131|2|4|7|7217.91|0.07|0.06|A|F|1995-03-31|1995-04-05|1995-04-11|NONE|TRUCK|he slyly ironic packages|
+3749|183|4|5|14|15164.52|0.02|0.00|N|F|1995-06-11|1995-05-19|1995-07-11|DELIVER IN PERSON|SHIP|press instruc|
+3749|54|6|6|10|9540.50|0.10|0.03|N|O|1995-06-24|1995-05-24|1995-07-18|COLLECT COD|SHIP|essly. regular pi|
+3750|134|10|1|37|38262.81|0.04|0.03|N|O|1995-07-08|1995-07-28|1995-07-28|DELIVER IN PERSON|REG AIR|usly busy account|
+3750|152|3|2|33|34720.95|0.05|0.03|N|O|1995-06-27|1995-06-20|1995-07-03|TAKE BACK RETURN|REG AIR|theodolites haggle. slyly pendin|
+3750|80|10|3|20|19601.60|0.09|0.05|N|F|1995-06-17|1995-06-06|1995-06-28|TAKE BACK RETURN|REG AIR|ss, ironic requests! fur|
+3750|166|1|4|33|35183.28|0.04|0.03|N|F|1995-06-15|1995-06-04|1995-06-29|COLLECT COD|RAIL|ep blithely according to the flu|
+3750|83|4|5|1|983.08|0.05|0.01|N|O|1995-07-24|1995-06-25|1995-08-21|DELIVER IN PERSON|REG AIR|l dolphins against the slyly|
+3750|113|7|6|47|47616.17|0.01|0.08|R|F|1995-05-11|1995-06-13|1995-06-02|TAKE BACK RETURN|FOB|slowly regular accounts. blithely ev|
+3751|172|2|1|37|39670.29|0.00|0.04|R|F|1994-04-30|1994-05-30|1994-05-30|NONE|REG AIR|ly express courts |
+3751|141|8|2|32|33316.48|0.03|0.05|R|F|1994-05-05|1994-07-02|1994-06-02|COLLECT COD|MAIL|rthogs could have to slee|
+3751|65|2|3|45|43427.70|0.08|0.06|R|F|1994-05-27|1994-06-19|1994-06-14|NONE|RAIL|according to |
+3751|14|4|4|39|35646.39|0.07|0.01|A|F|1994-08-16|1994-07-11|1994-09-12|COLLECT COD|TRUCK|refully according to the iro|
+3751|58|3|5|12|11496.60|0.02|0.03|A|F|1994-08-09|1994-06-30|1994-08-12|TAKE BACK RETURN|TRUCK|accounts wake furious|
+3751|76|5|6|39|38066.73|0.02|0.08|R|F|1994-08-01|1994-06-01|1994-08-26|COLLECT COD|SHIP|to beans. pending, express packages c|
+3776|3|10|1|39|35217.00|0.05|0.01|R|F|1993-01-03|1993-02-05|1993-01-08|COLLECT COD|FOB|yly blithely pending packages|
+3776|159|4|2|14|14828.10|0.06|0.08|R|F|1992-12-30|1993-02-12|1993-01-27|DELIVER IN PERSON|RAIL|y special ideas. express packages pr|
+3776|141|8|3|49|51015.86|0.01|0.08|R|F|1992-12-03|1993-02-16|1992-12-28|TAKE BACK RETURN|RAIL|equests. final, thin grouches |
+3776|92|6|4|49|48612.41|0.08|0.05|A|F|1993-02-11|1993-01-06|1993-02-27|COLLECT COD|MAIL|es: careful warthogs haggle fluffi|
+3777|100|4|1|11|11001.10|0.02|0.03|A|F|1994-04-09|1994-06-05|1994-04-14|NONE|FOB|ld ideas. even theodolites|
+3777|8|5|2|10|9080.00|0.03|0.01|R|F|1994-05-22|1994-05-29|1994-06-13|COLLECT COD|RAIL|le. ironic depths a|
+3777|166|7|3|18|19190.88|0.10|0.06|R|F|1994-05-04|1994-05-23|1994-05-22|COLLECT COD|REG AIR|eful packages use slyly: even deposits |
+3777|18|9|4|35|32130.35|0.10|0.04|A|F|1994-05-25|1994-05-26|1994-06-13|COLLECT COD|AIR|s. carefully express asymptotes accordi|
+3777|98|10|5|14|13973.26|0.04|0.05|R|F|1994-05-06|1994-06-24|1994-05-31|NONE|TRUCK|ording to the iro|
+3778|57|2|1|21|20098.05|0.01|0.06|R|F|1993-05-27|1993-07-10|1993-06-03|COLLECT COD|REG AIR|ts. blithely special theodoli|
+3778|29|10|2|32|29728.64|0.09|0.00|A|F|1993-06-22|1993-08-18|1993-07-03|TAKE BACK RETURN|MAIL|tes affix carefully above the |
+3778|94|6|3|41|40757.69|0.05|0.00|R|F|1993-06-21|1993-07-27|1993-07-15|COLLECT COD|FOB|e the furiously ironi|
+3778|169|4|4|28|29936.48|0.03|0.05|R|F|1993-08-18|1993-07-10|1993-09-06|TAKE BACK RETURN|REG AIR|y silent orbits print carefully against |
+3778|98|2|5|28|27946.52|0.01|0.06|R|F|1993-09-02|1993-08-08|1993-10-02|DELIVER IN PERSON|FOB|r deposits. theodol|
+3778|20|7|6|26|23920.52|0.00|0.01|A|F|1993-09-24|1993-07-06|1993-10-22|NONE|TRUCK| against the fluffily|
+3778|105|6|7|49|49249.90|0.02|0.04|A|F|1993-06-13|1993-08-08|1993-07-04|DELIVER IN PERSON|MAIL|ans. furiously |
+3779|46|5|1|28|26489.12|0.04|0.05|N|O|1997-05-06|1997-04-01|1997-05-18|TAKE BACK RETURN|AIR|s. close requests sleep|
+3779|110|3|2|5|5050.55|0.07|0.03|N|O|1997-01-07|1997-03-26|1997-02-05|DELIVER IN PERSON|AIR|heodolites. slyly regular a|
+3780|127|8|1|25|25678.00|0.08|0.04|N|O|1996-06-27|1996-07-02|1996-07-22|NONE|AIR|l, unusual |
+3780|190|1|2|40|43607.60|0.10|0.04|N|O|1996-06-06|1996-05-29|1996-07-01|COLLECT COD|SHIP|gular deposits-- furiously regular |
+3781|14|5|1|48|43872.48|0.02|0.06|N|O|1996-08-22|1996-08-13|1996-09-15|NONE|REG AIR|equests may cajole careful|
+3781|188|9|2|39|42439.02|0.10|0.00|N|O|1996-08-20|1996-08-16|1996-09-01|DELIVER IN PERSON|REG AIR|unts are carefully. ir|
+3781|30|1|3|17|15810.51|0.01|0.03|N|O|1996-06-23|1996-09-04|1996-07-19|TAKE BACK RETURN|REG AIR|. theodolite|
+3781|31|2|4|15|13965.45|0.05|0.00|N|O|1996-08-23|1996-08-08|1996-09-06|TAKE BACK RETURN|AIR| carefully blithe|
+3781|16|6|5|23|21068.23|0.09|0.08|N|O|1996-09-05|1996-08-18|1996-09-27|DELIVER IN PERSON|SHIP|pendencies are b|
+3782|27|10|1|29|26883.58|0.01|0.07|N|O|1996-09-17|1996-10-03|1996-10-07|DELIVER IN PERSON|REG AIR|quickly unusual pinto beans. carefully fina|
+3782|153|1|2|10|10531.50|0.03|0.05|N|O|1996-09-07|1996-11-19|1996-10-04|COLLECT COD|FOB|ven pinto b|
+3782|136|7|3|30|31083.90|0.06|0.06|N|O|1996-12-19|1996-10-31|1997-01-14|TAKE BACK RETURN|MAIL|slyly even pinto beans hag|
+3782|117|7|4|34|34581.74|0.02|0.06|N|O|1996-11-07|1996-10-22|1996-11-19|DELIVER IN PERSON|MAIL|gage after the even|
+3782|130|3|5|40|41205.20|0.09|0.04|N|O|1996-12-16|1996-11-22|1997-01-01|COLLECT COD|AIR|s instructions. regular accou|
+3783|167|4|1|36|38417.76|0.04|0.08|R|F|1993-12-17|1994-02-26|1994-01-03|DELIVER IN PERSON|SHIP|ites haggle among the carefully unusu|
+3783|73|3|2|36|35030.52|0.02|0.02|R|F|1994-03-02|1994-02-09|1994-03-15|COLLECT COD|TRUCK|egular accounts|
+3783|85|6|3|50|49254.00|0.04|0.01|R|F|1994-03-14|1994-01-09|1994-04-10|DELIVER IN PERSON|FOB|he furiously regular deposits. |
+3783|27|6|4|37|34299.74|0.10|0.05|R|F|1993-12-09|1994-02-17|1993-12-30|COLLECT COD|REG AIR|ing to the ideas. regular accounts de|
+3808|43|10|1|28|26405.12|0.02|0.01|R|F|1994-05-27|1994-06-18|1994-06-22|TAKE BACK RETURN|FOB|lly final accounts alo|
+3808|127|6|2|47|48274.64|0.04|0.08|R|F|1994-06-12|1994-06-03|1994-07-02|COLLECT COD|TRUCK|fully for the quickly final deposits: flu|
+3808|31|2|3|45|41896.35|0.00|0.03|R|F|1994-07-03|1994-05-29|1994-07-14|TAKE BACK RETURN|REG AIR| carefully special|
+3808|100|1|4|34|34003.40|0.07|0.04|R|F|1994-08-13|1994-07-22|1994-08-31|DELIVER IN PERSON|FOB| pearls will have to |
+3808|155|7|5|29|30599.35|0.08|0.03|A|F|1994-06-22|1994-05-26|1994-07-06|TAKE BACK RETURN|TRUCK| deposits across the pac|
+3808|168|5|6|44|46999.04|0.06|0.06|A|F|1994-06-07|1994-06-04|1994-06-25|NONE|REG AIR|the blithely regular foxes. even, final |
+3809|191|3|1|17|18550.23|0.10|0.04|N|O|1996-08-14|1996-07-05|1996-09-04|DELIVER IN PERSON|FOB|es detect furiously sil|
+3809|133|4|2|32|33060.16|0.01|0.02|N|O|1996-07-03|1996-06-01|1996-07-25|COLLECT COD|SHIP|xcuses would boost against the fluffily eve|
+3809|105|6|3|46|46234.60|0.10|0.06|N|O|1996-08-20|1996-06-01|1996-08-24|TAKE BACK RETURN|TRUCK|l asymptotes. special |
+3809|178|9|4|43|46361.31|0.00|0.04|N|O|1996-05-06|1996-06-22|1996-06-05|TAKE BACK RETURN|TRUCK|yly ironic decoys; regular, iron|
+3810|184|5|1|49|53124.82|0.05|0.01|R|F|1992-11-27|1992-10-30|1992-12-16|COLLECT COD|AIR|cajole. fur|
+3810|169|8|2|18|19244.88|0.01|0.04|A|F|1992-11-28|1992-11-15|1992-12-27|DELIVER IN PERSON|SHIP|s. furiously careful deposi|
+3810|137|3|3|41|42522.33|0.08|0.08|A|F|1992-10-26|1992-10-27|1992-11-05|COLLECT COD|SHIP|l requests boost slyly along the slyl|
+3810|182|3|4|11|11903.98|0.06|0.04|A|F|1992-12-18|1992-12-11|1993-01-15|DELIVER IN PERSON|MAIL| the pending pinto beans. expr|
+3811|164|3|1|24|25539.84|0.04|0.02|N|O|1998-07-13|1998-05-16|1998-08-12|TAKE BACK RETURN|TRUCK|deposits. slyly regular accounts cajo|
+3811|166|5|2|2|2132.32|0.01|0.08|N|O|1998-06-16|1998-06-16|1998-06-23|NONE|MAIL|slyly fluff|
+3811|43|6|3|19|17917.76|0.02|0.06|N|O|1998-07-20|1998-06-14|1998-07-29|NONE|MAIL|s boost blithely furiou|
+3811|171|1|4|50|53558.50|0.08|0.03|N|O|1998-07-28|1998-07-06|1998-08-16|COLLECT COD|FOB|ts are slyly fluffy ideas. furiou|
+3811|182|3|5|23|24890.14|0.00|0.04|N|O|1998-08-13|1998-07-09|1998-08-29|COLLECT COD|AIR|nstructions sleep quickly. slyly final |
+3811|2|7|6|35|31570.00|0.04|0.07|N|O|1998-04-17|1998-06-30|1998-04-25|NONE|REG AIR|yly final dolphins? quickly ironic frets|
+3812|145|4|1|33|34489.62|0.00|0.05|N|O|1996-10-10|1996-10-05|1996-10-15|TAKE BACK RETURN|MAIL|posits engage. ironic, regular p|
+3812|173|2|2|33|35414.61|0.06|0.03|N|O|1996-10-05|1996-10-13|1996-10-22|TAKE BACK RETURN|MAIL|inal excuses d|
+3813|176|7|1|37|39818.29|0.05|0.04|N|O|1998-10-13|1998-09-19|1998-10-28|NONE|REG AIR|ravely special packages haggle p|
+3813|123|2|2|39|39901.68|0.05|0.00|N|O|1998-08-30|1998-08-12|1998-09-29|COLLECT COD|FOB|y ideas. final ideas about the sp|
+3814|131|7|1|7|7217.91|0.02|0.02|R|F|1995-05-01|1995-05-09|1995-05-28|DELIVER IN PERSON|REG AIR|es sleep furiou|
+3814|173|3|2|14|15024.38|0.01|0.00|R|F|1995-03-17|1995-05-10|1995-04-16|DELIVER IN PERSON|AIR|sits along the final, ironic deposit|
+3814|168|7|3|36|38453.76|0.06|0.02|N|O|1995-06-19|1995-04-18|1995-06-28|COLLECT COD|SHIP|beans cajole quickly sl|
+3814|66|7|4|20|19321.20|0.04|0.07|R|F|1995-02-23|1995-03-26|1995-03-04|DELIVER IN PERSON|SHIP|. doggedly ironic deposits will have to wa|
+3814|107|2|5|15|15106.50|0.03|0.04|N|O|1995-06-23|1995-03-25|1995-07-09|COLLECT COD|SHIP| carefully final deposits haggle slyly|
+3814|83|4|6|47|46204.76|0.09|0.05|A|F|1995-04-16|1995-04-03|1995-05-14|DELIVER IN PERSON|AIR|nusual requests. bli|
+3814|132|8|7|12|12385.56|0.10|0.01|R|F|1995-03-18|1995-04-16|1995-03-20|TAKE BACK RETURN|REG AIR|ages cajole. packages haggle. final|
+3815|77|7|1|3|2931.21|0.07|0.00|N|O|1997-11-16|1997-11-15|1997-11-30|NONE|FOB|egular, express ideas. ironic, final dep|
+3815|130|5|2|11|11331.43|0.02|0.04|N|O|1997-11-01|1997-11-05|1997-11-27|COLLECT COD|TRUCK|sleep blithe|
+3840|187|8|1|45|48923.10|0.02|0.08|N|O|1998-10-31|1998-09-19|1998-11-30|DELIVER IN PERSON|TRUCK|o beans are. carefully final courts x|
+3840|46|9|2|12|11352.48|0.04|0.07|N|O|1998-10-02|1998-08-19|1998-10-20|TAKE BACK RETURN|RAIL|xpress pinto beans. accounts a|
+3840|73|4|3|45|43788.15|0.02|0.05|N|O|1998-10-12|1998-10-12|1998-10-28|TAKE BACK RETURN|FOB|onic, even packages are. pe|
+3840|148|9|4|41|42973.74|0.07|0.02|N|O|1998-07-21|1998-10-08|1998-08-01|TAKE BACK RETURN|MAIL| nag slyly? slyly pending accounts |
+3840|173|3|5|7|7512.19|0.09|0.08|N|O|1998-09-17|1998-09-20|1998-10-14|DELIVER IN PERSON|MAIL|. furiously final gifts sleep carefully pin|
+3840|107|8|6|33|33234.30|0.10|0.02|N|O|1998-07-29|1998-10-06|1998-08-04|DELIVER IN PERSON|SHIP|hely silent deposits w|
+3841|157|5|1|1|1057.15|0.06|0.03|A|F|1994-10-10|1994-11-12|1994-10-21|DELIVER IN PERSON|AIR| boost even re|
+3841|21|10|2|31|28551.62|0.09|0.03|A|F|1995-01-24|1994-11-25|1995-02-20|TAKE BACK RETURN|SHIP|n theodolites shall promise carefully. qui|
+3841|152|10|3|40|42086.00|0.06|0.02|A|F|1995-02-02|1994-11-30|1995-02-14|TAKE BACK RETURN|MAIL|its. quickly regular ideas nag carefully|
+3841|50|1|4|9|8550.45|0.10|0.07|A|F|1994-11-21|1994-12-26|1994-11-26|NONE|FOB|s according to the courts shall nag s|
+3841|176|7|5|3|3228.51|0.04|0.02|R|F|1994-10-24|1994-12-07|1994-11-09|COLLECT COD|FOB|foxes integrate |
+3841|163|8|6|48|51031.68|0.03|0.00|R|F|1994-11-23|1994-11-22|1994-12-01|DELIVER IN PERSON|FOB| according to the regular, |
+3842|162|7|1|28|29740.48|0.05|0.07|A|F|1992-06-17|1992-06-03|1992-06-24|DELIVER IN PERSON|TRUCK|s excuses thrash carefully.|
+3842|122|1|2|21|21464.52|0.07|0.05|R|F|1992-07-15|1992-06-02|1992-07-21|NONE|RAIL|r pinto be|
+3842|194|7|3|28|30637.32|0.00|0.00|A|F|1992-06-20|1992-05-22|1992-07-13|DELIVER IN PERSON|MAIL|lly alongside of the|
+3842|88|9|4|15|14821.20|0.07|0.01|A|F|1992-06-26|1992-06-23|1992-07-09|COLLECT COD|MAIL|ave packages are slyl|
+3842|68|3|5|13|12584.78|0.09|0.02|R|F|1992-04-13|1992-06-22|1992-05-11|COLLECT COD|RAIL|t blithely. busily regular accounts alon|
+3842|107|4|6|24|24170.40|0.08|0.08|R|F|1992-08-05|1992-06-29|1992-08-16|TAKE BACK RETURN|MAIL|phins are quickly|
+3843|15|6|1|7|6405.07|0.10|0.03|N|O|1997-02-13|1997-02-21|1997-02-20|TAKE BACK RETURN|SHIP|slyly even instructions. furiously eve|
+3843|1|4|2|30|27030.00|0.01|0.05|N|O|1997-02-14|1997-03-25|1997-03-13|DELIVER IN PERSON|AIR| wake. slyly even packages boost |
+3844|135|1|1|2|2070.26|0.03|0.07|R|F|1995-02-24|1995-02-03|1995-03-18|TAKE BACK RETURN|AIR|es haggle final acco|
+3844|102|7|2|5|5010.50|0.10|0.03|R|F|1995-04-29|1995-02-24|1995-05-05|TAKE BACK RETURN|RAIL| unwind quickly about the pending, i|
+3845|34|5|1|44|41097.32|0.01|0.08|A|F|1992-07-20|1992-07-15|1992-07-24|DELIVER IN PERSON|REG AIR|s haggle among the fluffily regula|
+3845|24|7|2|16|14784.32|0.09|0.05|A|F|1992-08-08|1992-06-08|1992-08-26|DELIVER IN PERSON|SHIP|ely bold ideas use. ex|
+3845|59|1|3|17|16303.85|0.08|0.01|A|F|1992-06-12|1992-07-05|1992-06-26|TAKE BACK RETURN|RAIL|counts haggle. reg|
+3845|46|9|4|1|946.04|0.04|0.05|R|F|1992-05-21|1992-06-07|1992-06-17|COLLECT COD|REG AIR| blithely ironic t|
+3845|196|7|5|27|29597.13|0.00|0.05|R|F|1992-08-20|1992-07-17|1992-09-02|COLLECT COD|REG AIR|kages. care|
+3845|105|8|6|30|30153.00|0.09|0.06|R|F|1992-08-21|1992-07-07|1992-08-25|COLLECT COD|FOB|counts do wake blithely. ironic requests |
+3846|61|10|1|15|14415.90|0.06|0.03|N|O|1998-02-17|1998-04-27|1998-02-21|NONE|REG AIR|uternes. carefully even|
+3846|171|2|2|30|32135.10|0.08|0.07|N|O|1998-05-01|1998-03-12|1998-05-20|TAKE BACK RETURN|FOB|deposits according to the fur|
+3846|15|5|3|49|44835.49|0.08|0.07|N|O|1998-02-14|1998-03-22|1998-02-17|DELIVER IN PERSON|RAIL|efully even packages against the blithe|
+3846|165|10|4|33|35150.28|0.05|0.00|N|O|1998-05-12|1998-03-14|1998-05-14|DELIVER IN PERSON|TRUCK|s instructions are. fu|
+3847|189|10|1|7|7624.26|0.08|0.00|A|F|1993-05-06|1993-06-06|1993-05-22|COLLECT COD|MAIL| about the blithely daring Tiresias. fl|
+3872|181|2|1|28|30273.04|0.10|0.04|N|O|1996-11-05|1996-11-10|1996-11-24|DELIVER IN PERSON|REG AIR|t after the carefully ironic excuses. f|
+3872|17|4|2|38|34846.38|0.04|0.05|N|O|1996-10-18|1996-12-03|1996-11-15|TAKE BACK RETURN|AIR|iously against the ironic, unusual a|
+3872|169|4|3|18|19244.88|0.07|0.07|N|O|1996-12-25|1996-10-24|1997-01-08|TAKE BACK RETURN|SHIP|s. regular, brave accounts sleep blith|
+3872|11|2|4|41|37351.41|0.07|0.03|N|O|1996-11-23|1996-11-12|1996-12-03|COLLECT COD|REG AIR|ly regular epitaphs boost|
+3872|70|7|5|42|40742.94|0.03|0.00|N|O|1997-01-03|1996-10-12|1997-01-16|COLLECT COD|MAIL|s the furio|
+3872|140|6|6|40|41605.60|0.07|0.05|N|O|1997-01-02|1996-10-29|1997-01-14|NONE|REG AIR|nts? regularly ironic ex|
+3873|68|3|1|19|18393.14|0.04|0.04|N|O|1998-05-15|1998-05-10|1998-05-17|NONE|FOB|y final ac|
+3873|145|8|2|44|45986.16|0.05|0.05|N|O|1998-07-23|1998-05-22|1998-08-14|COLLECT COD|AIR|yly even platelets wake. |
+3873|140|6|3|29|30164.06|0.01|0.04|N|O|1998-06-22|1998-05-20|1998-07-05|COLLECT COD|REG AIR|olphins af|
+3874|170|7|1|21|22473.57|0.09|0.08|R|F|1993-06-19|1993-07-20|1993-07-08|DELIVER IN PERSON|SHIP| requests cajole fluff|
+3874|19|6|2|48|44112.48|0.06|0.07|R|F|1993-06-13|1993-07-20|1993-06-20|NONE|RAIL| ideas throughout |
+3875|81|2|1|24|23545.92|0.02|0.08|N|O|1997-10-15|1997-11-27|1997-11-09|COLLECT COD|AIR|ecial packages. |
+3875|113|7|2|49|49642.39|0.04|0.04|N|O|1997-10-18|1997-10-13|1997-10-19|NONE|MAIL|sleep furiously about the deposits. quickl|
+3876|141|8|1|12|12493.68|0.06|0.07|N|O|1996-09-16|1996-10-23|1996-10-05|TAKE BACK RETURN|REG AIR|y above the pending tithes. blithely ironi|
+3876|140|6|2|37|38485.18|0.00|0.03|N|O|1996-11-30|1996-10-18|1996-12-18|DELIVER IN PERSON|AIR|t dependencies. blithely final packages u|
+3876|127|8|3|41|42111.92|0.02|0.04|N|O|1996-10-15|1996-10-17|1996-10-19|NONE|AIR| quickly blit|
+3877|50|7|1|12|11400.60|0.06|0.01|R|F|1993-05-30|1993-08-09|1993-06-24|TAKE BACK RETURN|FOB|nal requests. even requests are. pac|
+3877|145|4|2|47|49121.58|0.05|0.00|A|F|1993-08-01|1993-08-16|1993-08-04|NONE|FOB|furiously quick requests nag along the theo|
+3877|80|8|3|44|43123.52|0.09|0.00|A|F|1993-06-07|1993-07-15|1993-07-06|DELIVER IN PERSON|REG AIR|elets. quickly regular accounts caj|
+3877|148|9|4|36|37733.04|0.06|0.01|A|F|1993-07-27|1993-07-13|1993-08-11|DELIVER IN PERSON|AIR|lithely about the dogged ideas. ac|
+3877|5|6|5|41|37105.00|0.03|0.07|A|F|1993-06-30|1993-07-20|1993-07-01|DELIVER IN PERSON|FOB|integrate against the expres|
+3877|123|4|6|7|7161.84|0.04|0.08|R|F|1993-06-14|1993-07-09|1993-06-28|NONE|TRUCK|lar dolphins cajole silently |
+3878|200|1|1|6|6601.20|0.07|0.04|N|O|1997-06-21|1997-05-22|1997-07-01|COLLECT COD|FOB|s. regular instru|
+3878|88|9|2|13|12845.04|0.01|0.06|N|O|1997-06-08|1997-06-03|1997-06-25|TAKE BACK RETURN|TRUCK|leep ruthlessly about the carefu|
+3878|41|8|3|20|18820.80|0.08|0.03|N|O|1997-06-20|1997-05-24|1997-07-20|TAKE BACK RETURN|MAIL|the furiously careful ideas cajole slyly sl|
+3878|152|3|4|20|21043.00|0.01|0.07|N|O|1997-07-13|1997-05-22|1997-07-20|NONE|FOB|about the carefully ironic pa|
+3879|126|5|1|45|46175.40|0.10|0.08|N|O|1996-03-18|1996-01-03|1996-04-03|COLLECT COD|RAIL|ly according to the expr|
+3879|45|4|2|35|33076.40|0.00|0.07|N|O|1995-12-08|1996-01-23|1995-12-28|TAKE BACK RETURN|MAIL|o beans. accounts cajole furiously. re|
+3904|38|4|1|22|20636.66|0.04|0.03|N|O|1998-02-02|1998-02-09|1998-02-10|TAKE BACK RETURN|REG AIR|structions cajole carefully. carefully f|
+3904|184|5|2|19|20599.42|0.09|0.01|N|O|1998-02-10|1998-02-13|1998-02-20|TAKE BACK RETURN|AIR| excuses sleep slyly according to th|
+3905|101|8|1|43|43047.30|0.07|0.08|A|F|1994-03-30|1994-02-18|1994-04-09|DELIVER IN PERSON|REG AIR|uses are care|
+3905|116|10|2|7|7112.77|0.03|0.00|R|F|1994-03-01|1994-02-19|1994-03-11|DELIVER IN PERSON|AIR|ully furiously furious packag|
+3905|170|7|3|6|6421.02|0.07|0.02|R|F|1994-04-07|1994-03-07|1994-04-21|DELIVER IN PERSON|RAIL|ow furiously. deposits wake ironic |
+3906|153|1|1|42|44232.30|0.00|0.04|R|F|1992-09-03|1992-07-22|1992-09-04|COLLECT COD|RAIL|jole blithely after the furiously regular |
+3906|40|1|2|50|47002.00|0.01|0.07|R|F|1992-09-24|1992-08-24|1992-09-29|NONE|MAIL|ke slyly. stealt|
+3906|180|9|3|15|16202.70|0.06|0.02|R|F|1992-07-30|1992-08-26|1992-08-02|TAKE BACK RETURN|FOB|dependencies at the |
+3906|59|10|4|36|34525.80|0.08|0.08|A|F|1992-08-07|1992-08-08|1992-08-24|NONE|SHIP|y. ironic deposits haggle sl|
+3907|112|6|1|41|41496.51|0.06|0.02|A|F|1992-09-13|1992-10-23|1992-09-29|COLLECT COD|MAIL|ackages wake along the carefully regul|
+3907|145|4|2|41|42850.74|0.03|0.00|A|F|1992-10-25|1992-10-17|1992-11-01|TAKE BACK RETURN|RAIL|s above the unusual ideas sleep furiousl|
+3907|52|4|3|45|42842.25|0.02|0.07|R|F|1992-09-21|1992-09-19|1992-10-18|COLLECT COD|REG AIR| about the regular pac|
+3907|176|5|4|48|51656.16|0.05|0.07|A|F|1992-09-24|1992-10-16|1992-10-06|DELIVER IN PERSON|TRUCK|nt asymptotes lose across th|
+3907|62|3|5|22|21165.32|0.09|0.01|R|F|1992-09-20|1992-10-30|1992-09-29|TAKE BACK RETURN|TRUCK|ly. furiously unusual deposits use afte|
+3907|126|9|6|34|34888.08|0.02|0.02|R|F|1992-09-06|1992-10-08|1992-09-12|COLLECT COD|FOB| requests according to the slyly pending |
+3907|110|5|7|8|8080.88|0.10|0.01|A|F|1992-09-18|1992-10-29|1992-09-27|NONE|REG AIR|furiously final packages.|
+3908|92|4|1|50|49604.50|0.05|0.04|R|F|1993-06-19|1993-04-27|1993-07-05|DELIVER IN PERSON|MAIL| even accounts wake |
+3908|148|9|2|8|8385.12|0.06|0.03|A|F|1993-03-12|1993-04-13|1993-03-22|DELIVER IN PERSON|SHIP|r instructions was requests. ironically |
+3909|178|6|1|30|32345.10|0.03|0.07|N|O|1998-10-17|1998-10-14|1998-10-28|COLLECT COD|TRUCK|ly even deposits across the ironic notorni|
+3909|191|4|2|46|50194.74|0.03|0.01|N|O|1998-10-08|1998-10-15|1998-10-24|NONE|FOB|the blithely unusual ideas|
+3910|139|10|1|10|10391.30|0.00|0.08|N|O|1996-10-18|1996-10-31|1996-11-14|DELIVER IN PERSON|FOB|tions boost furiously unusual e|
+3910|71|10|2|31|30103.17|0.05|0.03|N|O|1996-12-22|1996-11-14|1997-01-01|TAKE BACK RETURN|SHIP|ess instructions. |
+3910|20|7|3|6|5520.12|0.04|0.04|N|O|1996-12-08|1996-10-30|1996-12-31|DELIVER IN PERSON|MAIL|ly sly platelets are fluffily slyly si|
+3910|153|1|4|1|1053.15|0.03|0.06|N|O|1996-09-12|1996-10-21|1996-09-19|DELIVER IN PERSON|FOB|s sleep neve|
+3911|113|7|1|10|10131.10|0.07|0.06|N|O|1995-06-22|1995-05-30|1995-06-28|COLLECT COD|FOB|ss theodolites are blithely along t|
+3911|119|9|2|14|14267.54|0.08|0.05|R|F|1995-04-28|1995-05-03|1995-05-22|NONE|RAIL|e blithely brave depo|
+3911|92|5|3|12|11905.08|0.10|0.05|R|F|1995-04-04|1995-04-16|1995-04-10|COLLECT COD|FOB|uctions. blithely regula|
+3936|137|8|1|25|25928.25|0.06|0.03|N|O|1996-12-03|1996-12-27|1997-01-01|DELIVER IN PERSON|RAIL|gular requests nag quic|
+3936|188|9|2|24|26116.32|0.10|0.07|N|O|1996-11-22|1997-01-01|1996-12-08|NONE|AIR|ns. accounts mold fl|
+3936|83|4|3|42|41289.36|0.00|0.07|N|O|1997-01-03|1997-01-29|1997-01-14|COLLECT COD|AIR|elets wake amo|
+3936|62|7|4|12|11544.72|0.06|0.05|N|O|1996-11-25|1997-01-09|1996-12-06|DELIVER IN PERSON|SHIP|ithely across the carefully brave req|
+3936|84|5|5|35|34442.80|0.02|0.08|N|O|1996-12-04|1997-01-06|1996-12-22|NONE|SHIP|lly ironic requ|
+3936|103|6|6|26|26080.60|0.01|0.02|N|O|1997-02-27|1997-01-16|1997-03-22|NONE|RAIL|quickly pen|
+3937|70|7|1|48|46563.36|0.10|0.02|N|O|1998-03-15|1998-02-22|1998-03-30|DELIVER IN PERSON|FOB|gainst the thinl|
+3937|48|1|2|30|28441.20|0.01|0.07|N|O|1998-01-17|1998-01-03|1998-02-08|COLLECT COD|TRUCK|al packages slee|
+3937|115|5|3|27|27407.97|0.03|0.00|N|O|1998-02-06|1998-01-12|1998-02-20|NONE|MAIL|ven ideas. slyly expr|
+3937|154|2|4|50|52707.50|0.01|0.02|N|O|1998-01-15|1998-01-09|1998-02-04|DELIVER IN PERSON|AIR|ong the carefully exp|
+3937|3|10|5|29|26187.00|0.03|0.07|N|O|1998-03-06|1998-02-22|1998-03-14|NONE|TRUCK|nt pinto beans above the pending instr|
+3937|193|6|6|6|6559.14|0.00|0.00|N|O|1998-01-24|1998-02-13|1998-01-27|DELIVER IN PERSON|FOB|into beans. slyly silent orbits alongside o|
+3937|164|9|7|1|1064.16|0.02|0.05|N|O|1998-03-29|1998-01-08|1998-04-27|TAKE BACK RETURN|TRUCK|refully agains|
+3938|159|4|1|46|48720.90|0.10|0.07|R|F|1993-05-20|1993-05-04|1993-06-12|DELIVER IN PERSON|FOB|ly even foxes are slyly fu|
+3939|160|8|1|8|8481.28|0.03|0.06|N|O|1996-01-29|1996-04-05|1996-02-26|COLLECT COD|REG AIR|e packages. express, pen|
+3940|178|7|1|33|35579.61|0.10|0.07|N|O|1996-05-19|1996-04-19|1996-05-23|TAKE BACK RETURN|RAIL|ly ironic packages about the pending accou|
+3940|69|4|2|40|38762.40|0.08|0.02|N|O|1996-02-29|1996-03-22|1996-03-04|NONE|MAIL|ts. regular fox|
+3940|89|10|3|8|7912.64|0.07|0.08|N|O|1996-04-04|1996-04-12|1996-04-18|DELIVER IN PERSON|RAIL|ions cajole furiously regular pinto beans. |
+3940|137|3|4|11|11408.43|0.09|0.05|N|O|1996-03-09|1996-05-13|1996-03-17|COLLECT COD|REG AIR|e of the special packages. furiously|
+3940|1|6|5|41|36941.00|0.00|0.07|N|O|1996-05-08|1996-05-03|1996-06-03|COLLECT COD|MAIL|thily. deposits cajole.|
+3941|41|2|1|47|44228.88|0.05|0.07|N|O|1996-11-24|1996-10-09|1996-12-22|DELIVER IN PERSON|RAIL| carefully pending|
+3941|123|6|2|19|19439.28|0.05|0.00|N|O|1996-11-10|1996-10-26|1996-12-05|COLLECT COD|RAIL|eposits haggle furiously even|
+3941|10|3|3|2|1820.02|0.01|0.03|N|O|1996-12-04|1996-10-01|1996-12-25|NONE|REG AIR|es wake after the|
+3941|110|7|4|29|29293.19|0.00|0.03|N|O|1996-09-14|1996-10-04|1996-09-19|NONE|MAIL|g the blithely|
+3942|183|4|1|6|6499.08|0.05|0.05|A|F|1993-07-01|1993-09-14|1993-07-23|DELIVER IN PERSON|SHIP|ep ruthlessly carefully final accounts: s|
+3942|194|7|2|5|5470.95|0.06|0.02|R|F|1993-09-27|1993-09-24|1993-10-07|DELIVER IN PERSON|MAIL|. fluffily pending deposits above the flu|
+3942|156|4|3|25|26403.75|0.04|0.06|R|F|1993-09-13|1993-08-01|1993-09-29|COLLECT COD|RAIL|d the quick packages|
+3943|198|2|1|15|16472.85|0.03|0.01|N|O|1997-01-13|1996-12-17|1997-02-02|COLLECT COD|REG AIR| grow fluffily according to the |
+3943|96|7|2|9|8964.81|0.00|0.06|N|O|1996-11-27|1997-01-03|1996-12-17|COLLECT COD|RAIL|refully ironic |
+3943|17|4|3|32|29344.32|0.00|0.02|N|O|1996-10-22|1996-12-17|1996-11-04|TAKE BACK RETURN|TRUCK| unusual ideas into the furiously even pack|
+3943|50|1|4|5|4750.25|0.04|0.04|N|O|1997-01-09|1996-11-10|1997-02-06|COLLECT COD|RAIL|arefully regular deposits accord|
+3968|54|2|1|27|25759.35|0.04|0.05|N|O|1997-04-25|1997-04-17|1997-05-11|TAKE BACK RETURN|MAIL|t silently.|
+3968|26|9|2|45|41670.90|0.00|0.07|N|O|1997-06-18|1997-04-24|1997-06-25|DELIVER IN PERSON|FOB|ully slyly fi|
+3968|156|7|3|43|45414.45|0.07|0.06|N|O|1997-04-30|1997-05-14|1997-05-18|TAKE BACK RETURN|SHIP|ly regular accounts|
+3968|61|8|4|7|6727.42|0.07|0.02|N|O|1997-03-30|1997-05-01|1997-04-12|DELIVER IN PERSON|SHIP|efully bold instructions. express|
+3969|52|4|1|39|37129.95|0.04|0.04|N|O|1997-06-12|1997-06-13|1997-07-05|NONE|MAIL|ly bold ideas s|
+3969|197|1|2|26|28526.94|0.05|0.03|N|O|1997-07-08|1997-07-30|1997-07-10|TAKE BACK RETURN|AIR|fluffily; braids detect.|
+3969|79|8|3|46|45037.22|0.04|0.02|N|O|1997-05-29|1997-06-15|1997-06-10|TAKE BACK RETURN|SHIP|fully final requests sleep stealthily. care|
+3969|151|9|4|21|22074.15|0.07|0.04|N|O|1997-08-31|1997-07-16|1997-09-02|TAKE BACK RETURN|MAIL|unts doze quickly final reque|
+3969|72|3|5|40|38882.80|0.09|0.00|N|O|1997-05-19|1997-08-02|1997-06-05|COLLECT COD|TRUCK|lar requests cajole furiously blithely regu|
+3969|105|8|6|4|4020.40|0.02|0.01|N|O|1997-06-04|1997-07-31|1997-06-13|COLLECT COD|REG AIR|dencies wake blithely? quickly even theodo|
+3970|88|9|1|2|1976.16|0.01|0.07|R|F|1992-04-24|1992-06-03|1992-05-16|TAKE BACK RETURN|RAIL|carefully pending foxes wake blithely |
+3970|109|6|2|18|18163.80|0.03|0.08|A|F|1992-06-06|1992-06-18|1992-07-05|DELIVER IN PERSON|TRUCK| maintain slyly. ir|
+3970|154|6|3|10|10541.50|0.10|0.04|A|F|1992-07-01|1992-05-31|1992-07-02|NONE|AIR| special packages wake after the final br|
+3970|22|5|4|34|31348.68|0.05|0.00|A|F|1992-06-25|1992-05-23|1992-07-12|COLLECT COD|SHIP|y final gifts are. carefully pe|
+3970|30|3|5|23|21390.69|0.05|0.04|A|F|1992-06-04|1992-06-14|1992-06-13|COLLECT COD|TRUCK| above the final braids. regular|
+3970|9|6|6|46|41814.00|0.07|0.04|R|F|1992-04-29|1992-05-14|1992-05-24|NONE|FOB|yly ironic|
+3970|5|8|7|46|41630.00|0.08|0.08|R|F|1992-05-02|1992-05-12|1992-05-10|COLLECT COD|MAIL|ix slyly. quickly silen|
+3971|96|8|1|47|46816.23|0.06|0.04|N|O|1996-07-07|1996-08-08|1996-08-01|TAKE BACK RETURN|RAIL|e slyly final dependencies x-ray |
+3971|191|5|2|2|2182.38|0.04|0.03|N|O|1996-07-15|1996-08-12|1996-07-26|NONE|SHIP|haggle abou|
+3972|51|3|1|2|1902.10|0.05|0.03|A|F|1994-07-24|1994-06-30|1994-08-13|TAKE BACK RETURN|SHIP|y final theodolite|
+3973|30|9|1|21|19530.63|0.02|0.06|R|F|1992-06-18|1992-06-03|1992-07-02|COLLECT COD|REG AIR|equests. furiously|
+3973|115|2|2|37|37559.07|0.07|0.00|A|F|1992-05-29|1992-05-04|1992-06-23|TAKE BACK RETURN|SHIP|inos wake fluffily. pending requests nag |
+3973|40|6|3|40|37601.60|0.08|0.05|R|F|1992-05-03|1992-06-09|1992-05-21|COLLECT COD|RAIL|g the carefully blithe f|
+3974|22|1|1|47|43334.94|0.10|0.03|N|O|1996-06-03|1996-05-08|1996-06-28|NONE|TRUCK|dencies above the re|
+3974|61|8|2|17|16338.02|0.05|0.07|N|O|1996-04-05|1996-05-21|1996-04-28|COLLECT COD|TRUCK|ions eat slyly after the blithely |
+3975|57|9|1|38|36367.90|0.01|0.05|N|O|1995-08-02|1995-06-18|1995-08-19|COLLECT COD|TRUCK|es are furiously: furi|
+4000|196|7|1|41|44943.79|0.06|0.01|A|F|1992-03-02|1992-03-14|1992-03-27|COLLECT COD|FOB|ve the even, fi|
+4000|75|5|2|44|42903.08|0.09|0.06|A|F|1992-03-27|1992-02-18|1992-03-31|COLLECT COD|AIR|equests use blithely blithely bold d|
+4001|106|1|1|26|26158.60|0.00|0.01|N|O|1997-07-26|1997-06-18|1997-08-08|DELIVER IN PERSON|RAIL|tegrate blithely|
+4001|41|10|2|19|17879.76|0.03|0.02|N|O|1997-08-23|1997-06-15|1997-09-18|COLLECT COD|SHIP|ackages. carefully ironi|
+4001|94|5|3|18|17893.62|0.07|0.00|N|O|1997-06-04|1997-06-22|1997-06-13|DELIVER IN PERSON|MAIL|lithely ironic d|
+4001|2|9|4|39|35178.00|0.00|0.00|N|O|1997-06-13|1997-06-17|1997-06-25|NONE|SHIP| dogged excuses. blithe|
+4002|111|5|1|35|35388.85|0.01|0.08|N|O|1997-05-16|1997-06-15|1997-06-02|DELIVER IN PERSON|TRUCK|eep. quickly|
+4002|198|9|2|20|21963.80|0.00|0.03|N|O|1997-06-15|1997-05-20|1997-07-11|NONE|MAIL|lly even ins|
+4002|40|1|3|6|5640.24|0.08|0.07|N|O|1997-05-02|1997-07-07|1997-05-16|TAKE BACK RETURN|RAIL| furiously furiously special theodoli|
+4002|199|3|4|6|6595.14|0.06|0.06|N|O|1997-07-01|1997-05-15|1997-07-31|NONE|MAIL|he slyly iro|
+4002|99|1|5|4|3996.36|0.08|0.07|N|O|1997-05-06|1997-06-15|1997-05-24|NONE|REG AIR|ccording to the careful|
+4003|52|4|1|18|17136.90|0.04|0.07|R|F|1993-02-02|1993-04-15|1993-02-28|TAKE BACK RETURN|AIR|ar grouches s|
+4004|121|2|1|23|23485.76|0.07|0.02|A|F|1993-08-12|1993-07-13|1993-08-16|TAKE BACK RETURN|TRUCK| bold theodolites? special packages accordi|
+4004|64|5|2|47|45310.82|0.07|0.04|R|F|1993-06-25|1993-08-03|1993-07-12|NONE|SHIP|thely instead of the even, unu|
+4004|114|5|3|39|39550.29|0.10|0.05|R|F|1993-07-12|1993-07-27|1993-07-18|NONE|MAIL|ccounts sleep furious|
+4004|74|4|4|46|44807.22|0.10|0.04|R|F|1993-09-04|1993-07-13|1993-09-28|COLLECT COD|FOB|ncies. slyly pending dolphins sleep furio|
+4004|155|3|5|9|9496.35|0.04|0.06|A|F|1993-08-25|1993-06-10|1993-09-24|COLLECT COD|MAIL|ly ironic requests. quickly pending ide|
+4004|161|10|6|44|46691.04|0.07|0.05|R|F|1993-07-25|1993-07-23|1993-08-16|TAKE BACK RETURN|REG AIR|ut the sauternes. bold, ironi|
+4004|126|9|7|20|20522.40|0.07|0.05|A|F|1993-06-19|1993-06-14|1993-07-04|COLLECT COD|REG AIR|. ironic deposits cajole blithely?|
+4005|4|1|1|26|23504.00|0.09|0.05|N|O|1996-12-01|1997-02-03|1996-12-15|NONE|REG AIR| to the quic|
+4005|17|8|2|28|25676.28|0.02|0.06|N|O|1996-12-11|1997-01-24|1996-12-17|DELIVER IN PERSON|REG AIR|ly carefully ironic deposits. slyly|
+4005|72|10|3|28|27217.96|0.03|0.01|N|O|1996-12-08|1997-01-14|1996-12-30|TAKE BACK RETURN|MAIL|y pending dependenc|
+4005|15|9|4|49|44835.49|0.09|0.00|N|O|1997-01-31|1996-12-24|1997-03-02|NONE|RAIL|tions sleep across the silent d|
+4005|6|7|5|14|12684.00|0.09|0.08|N|O|1996-11-27|1997-01-09|1996-12-25|NONE|TRUCK|ld requests. slyly final instructi|
+4006|55|7|1|11|10505.55|0.05|0.08|A|F|1995-04-29|1995-02-21|1995-05-20|TAKE BACK RETURN|RAIL|ress foxes cajole quick|
+4006|159|4|2|18|19064.70|0.05|0.03|A|F|1995-01-29|1995-03-08|1995-02-02|TAKE BACK RETURN|MAIL|gouts! slyly iron|
+4006|24|5|3|15|13860.30|0.01|0.02|R|F|1995-02-23|1995-04-02|1995-02-25|TAKE BACK RETURN|RAIL|n deposits cajole slyl|
+4006|114|5|4|25|25352.75|0.00|0.07|A|F|1995-02-23|1995-02-09|1995-02-24|DELIVER IN PERSON|SHIP| requests use depos|
+4007|57|2|1|32|30625.60|0.00|0.03|R|F|1993-09-30|1993-08-16|1993-10-03|DELIVER IN PERSON|RAIL|nal accounts across t|
+4007|116|10|2|41|41660.51|0.04|0.06|A|F|1993-10-11|1993-08-30|1993-11-04|DELIVER IN PERSON|TRUCK|eposits. regular epitaphs boost blithely.|
+4007|102|9|3|5|5010.50|0.09|0.06|A|F|1993-09-17|1993-08-29|1993-10-12|TAKE BACK RETURN|FOB|y unusual packa|
+4007|138|4|4|15|15571.95|0.05|0.02|A|F|1993-09-01|1993-07-19|1993-09-03|DELIVER IN PERSON|FOB|le furiously quickly |
+4007|26|7|5|23|21298.46|0.02|0.07|A|F|1993-10-08|1993-09-09|1993-10-23|COLLECT COD|MAIL|ter the accounts. expr|
+4032|102|3|1|8|8016.80|0.06|0.00|N|O|1998-06-04|1998-05-17|1998-07-03|TAKE BACK RETURN|RAIL|ometimes even cou|
+4032|2|9|2|27|24354.00|0.09|0.00|N|O|1998-05-31|1998-04-19|1998-06-24|COLLECT COD|REG AIR|le furiously according to|
+4032|154|2|3|23|24245.45|0.09|0.06|N|O|1998-06-12|1998-05-11|1998-06-24|COLLECT COD|MAIL|ording to the |
+4032|85|6|4|10|9850.80|0.09|0.05|N|O|1998-03-31|1998-04-22|1998-04-07|NONE|REG AIR| carefully bol|
+4033|110|1|1|27|27272.97|0.01|0.04|R|F|1993-08-08|1993-08-14|1993-08-09|NONE|AIR|pinto beans|
+4033|38|4|2|34|31893.02|0.07|0.00|R|F|1993-07-19|1993-08-05|1993-07-26|NONE|RAIL|t the blithely dogg|
+4034|190|1|1|48|52329.12|0.03|0.03|A|F|1994-03-01|1994-01-16|1994-03-16|NONE|RAIL| blithely regular requests play carefull|
+4034|57|5|2|47|44981.35|0.07|0.05|A|F|1994-01-27|1993-12-26|1994-02-04|NONE|TRUCK|eodolites was slyly ironic ideas. de|
+4034|54|5|3|43|41024.15|0.10|0.03|A|F|1993-11-29|1994-01-08|1993-12-10|DELIVER IN PERSON|FOB|posits wake carefully af|
+4034|28|9|4|46|42688.92|0.06|0.00|A|F|1994-02-22|1994-01-09|1994-03-04|DELIVER IN PERSON|AIR|uests. furiously unusual instructions wake|
+4034|196|10|5|7|7673.33|0.07|0.06|R|F|1994-03-04|1994-01-22|1994-04-01|NONE|AIR|y even theodolites. slyly regular instru|
+4034|50|9|6|5|4750.25|0.01|0.06|A|F|1994-02-12|1994-01-24|1994-02-13|COLLECT COD|AIR|fully around the furiously ironic re|
+4035|97|8|1|4|3988.36|0.08|0.03|R|F|1992-04-21|1992-04-23|1992-04-25|COLLECT COD|AIR|ilent, even pear|
+4035|136|7|2|4|4144.52|0.07|0.00|A|F|1992-05-21|1992-04-24|1992-05-24|DELIVER IN PERSON|FOB|en instructions sleep blith|
+4035|118|8|3|1|1018.11|0.03|0.01|R|F|1992-06-18|1992-05-19|1992-07-02|COLLECT COD|FOB| requests. quickly |
+4035|182|3|4|13|14068.34|0.00|0.01|R|F|1992-06-10|1992-05-16|1992-07-10|NONE|SHIP|s. furiously even courts wake slyly|
+4036|6|1|1|46|41676.00|0.09|0.00|N|O|1997-06-21|1997-05-29|1997-07-18|NONE|REG AIR|usly across the even th|
+4036|53|1|2|21|20014.05|0.09|0.07|N|O|1997-08-08|1997-06-28|1997-08-09|COLLECT COD|MAIL|e carefully. qui|
+4036|142|3|3|6|6252.84|0.07|0.01|N|O|1997-06-19|1997-06-16|1997-07-01|DELIVER IN PERSON|SHIP|equests wake about the bold id|
+4036|127|10|4|20|20542.40|0.08|0.02|N|O|1997-08-11|1997-07-11|1997-09-03|NONE|TRUCK|slyly bold deposits cajole pending, blithe|
+4037|64|9|1|32|30849.92|0.00|0.06|A|F|1993-05-06|1993-06-08|1993-05-31|DELIVER IN PERSON|AIR|e of the pending, iron|
+4037|47|8|2|4|3788.16|0.09|0.07|A|F|1993-07-05|1993-06-12|1993-08-03|DELIVER IN PERSON|RAIL|s around the blithely ironic ac|
+4038|196|10|1|40|43847.60|0.05|0.01|N|O|1996-01-15|1996-03-13|1996-01-25|COLLECT COD|TRUCK|t. slyly silent pinto beans amo|
+4038|12|9|2|37|33744.37|0.04|0.03|N|O|1996-03-17|1996-03-19|1996-04-07|DELIVER IN PERSON|REG AIR| packages |
+4038|32|3|3|24|22368.72|0.10|0.04|N|O|1996-04-06|1996-02-15|1996-04-18|TAKE BACK RETURN|RAIL|the furiously regu|
+4038|150|1|4|29|30454.35|0.07|0.06|N|O|1996-01-07|1996-03-08|1996-01-13|NONE|FOB|ffix. quietly ironic packages a|
+4038|79|7|5|24|23497.68|0.07|0.06|N|O|1996-04-01|1996-04-05|1996-04-28|DELIVER IN PERSON|TRUCK|ake quickly after the final, ironic ac|
+4038|36|2|6|6|5616.18|0.07|0.05|N|O|1996-02-09|1996-03-05|1996-03-10|COLLECT COD|SHIP| special instructions. packa|
+4039|94|5|1|38|37775.42|0.03|0.06|N|O|1998-03-09|1997-12-31|1998-03-21|DELIVER IN PERSON|REG AIR|sual asymptotes. ironic deposits nag aft|
+4039|122|5|2|17|17376.04|0.10|0.04|N|O|1998-01-15|1998-01-20|1998-01-28|TAKE BACK RETURN|MAIL| regular foxes haggle carefully bo|
+4039|64|1|3|9|8676.54|0.10|0.01|N|O|1998-03-08|1998-02-05|1998-04-05|TAKE BACK RETURN|FOB|t? pinto beans cajole across the thinly r|
+4039|28|3|4|43|39904.86|0.01|0.02|N|O|1998-01-02|1997-12-22|1998-01-15|NONE|FOB|beans believe bene|
+4039|134|5|5|43|44467.59|0.09|0.00|N|O|1998-01-20|1998-01-11|1998-02-05|COLLECT COD|SHIP|sts along the regular in|
+4064|199|1|1|3|3297.57|0.10|0.04|N|O|1997-01-04|1997-01-01|1997-01-23|NONE|SHIP|its! quickly sp|
+4064|40|6|2|15|14100.60|0.02|0.02|N|O|1996-11-09|1996-12-04|1996-11-18|DELIVER IN PERSON|MAIL|braids affix across the regular sheave|
+4064|197|10|3|32|35110.08|0.04|0.07|N|O|1997-01-14|1997-01-01|1997-01-21|COLLECT COD|REG AIR|es boost. careful|
+4064|163|8|4|24|25515.84|0.02|0.02|N|O|1997-01-01|1996-12-31|1997-01-23|DELIVER IN PERSON|SHIP|ly regular ideas.|
+4064|21|2|5|12|11052.24|0.08|0.08|N|O|1997-02-08|1996-12-18|1997-03-06|TAKE BACK RETURN|RAIL|ding to the requests|
+4064|184|5|6|46|49872.28|0.03|0.00|N|O|1996-10-13|1997-01-05|1996-11-06|DELIVER IN PERSON|REG AIR|alongside of the f|
+4064|200|2|7|9|9901.80|0.01|0.06|N|O|1996-12-17|1996-12-13|1997-01-12|NONE|AIR|furiously f|
+4065|138|9|1|14|14533.82|0.04|0.02|A|F|1994-08-22|1994-07-29|1994-09-19|DELIVER IN PERSON|TRUCK|e furiously outside |
+4065|15|6|2|46|42090.46|0.03|0.05|A|F|1994-06-29|1994-08-01|1994-07-03|TAKE BACK RETURN|SHIP|, regular requests may mold above the |
+4065|97|10|3|33|32903.97|0.00|0.03|A|F|1994-09-03|1994-08-16|1994-09-13|DELIVER IN PERSON|AIR|ain blithely |
+4065|107|2|4|8|8056.80|0.00|0.01|R|F|1994-10-04|1994-08-05|1994-10-25|TAKE BACK RETURN|SHIP|ages haggle carefully|
+4065|123|4|5|29|29670.48|0.02|0.07|A|F|1994-06-29|1994-08-19|1994-07-17|NONE|RAIL|equests. packages sleep slyl|
+4065|110|5|6|16|16161.76|0.05|0.00|R|F|1994-08-25|1994-08-06|1994-09-09|COLLECT COD|TRUCK|ncies use furiously. quickly un|
+4065|144|7|7|11|11485.54|0.10|0.04|A|F|1994-07-25|1994-08-02|1994-07-30|NONE|RAIL|hang silently about |
+4066|139|5|1|9|9352.17|0.01|0.05|N|O|1997-05-06|1997-03-25|1997-05-27|COLLECT COD|FOB|nal, ironic accounts. blithel|
+4066|93|5|2|19|18868.71|0.05|0.00|N|O|1997-05-13|1997-04-17|1997-06-08|NONE|TRUCK|quests. slyly regu|
+4066|76|5|3|8|7808.56|0.03|0.03|N|O|1997-04-24|1997-03-11|1997-05-20|NONE|REG AIR|accounts. special pinto beans|
+4066|179|9|4|49|52879.33|0.01|0.01|N|O|1997-02-17|1997-03-24|1997-02-19|NONE|TRUCK|ial braids. furiously final deposits sl|
+4066|171|2|5|43|46060.31|0.05|0.02|N|O|1997-02-16|1997-04-14|1997-02-18|DELIVER IN PERSON|MAIL|r instructions. slyly special |
+4066|109|2|6|44|44400.40|0.01|0.00|N|O|1997-03-01|1997-04-27|1997-03-29|DELIVER IN PERSON|MAIL|express accounts nag bli|
+4067|180|1|1|18|19443.24|0.03|0.08|A|F|1993-01-24|1992-12-23|1993-02-20|TAKE BACK RETURN|FOB|e the slyly final packages d|
+4067|96|10|2|14|13945.26|0.00|0.00|R|F|1993-02-03|1992-12-02|1993-02-07|TAKE BACK RETURN|TRUCK|ructions. quickly ironic accounts detect |
+4067|141|10|3|17|17699.38|0.03|0.05|A|F|1993-01-26|1992-11-23|1993-01-27|NONE|REG AIR|ts haggle slyly unusual, final|
+4067|90|1|4|40|39603.60|0.07|0.08|R|F|1993-01-09|1992-11-21|1993-01-16|DELIVER IN PERSON|TRUCK|lar theodolites nag blithely above the|
+4067|85|6|5|17|16746.36|0.08|0.03|A|F|1993-01-20|1992-12-29|1993-02-03|DELIVER IN PERSON|REG AIR|r accounts. slyly special pa|
+4067|96|8|6|12|11953.08|0.04|0.03|A|F|1992-12-12|1992-11-28|1992-12-15|DELIVER IN PERSON|AIR|lly slyly even theodol|
+4067|83|4|7|17|16712.36|0.01|0.01|R|F|1992-12-12|1992-12-23|1992-12-30|NONE|AIR|ts affix. regular, regular requests s|
+4068|110|1|1|43|43434.73|0.05|0.06|N|O|1996-11-28|1996-11-16|1996-12-22|NONE|AIR|ructions. regular, special packag|
+4068|57|5|2|31|29668.55|0.08|0.03|N|O|1996-12-11|1996-12-07|1996-12-30|NONE|SHIP|ds wake carefully amon|
+4069|129|2|1|39|40135.68|0.09|0.02|R|F|1992-09-06|1992-07-22|1992-09-25|COLLECT COD|SHIP|ven theodolites nag quickly. fluffi|
+4069|43|4|2|32|30177.28|0.10|0.08|A|F|1992-06-18|1992-07-20|1992-07-07|TAKE BACK RETURN|TRUCK|unts. deposit|
+4069|186|7|3|3|3258.54|0.06|0.01|R|F|1992-07-26|1992-07-07|1992-08-04|COLLECT COD|FOB|l packages. even, |
+4069|79|8|4|22|21539.54|0.10|0.05|A|F|1992-08-05|1992-08-04|1992-08-25|COLLECT COD|SHIP|ts. slyly special instruction|
+4069|157|5|5|50|52857.50|0.09|0.06|A|F|1992-07-26|1992-06-30|1992-08-01|TAKE BACK RETURN|REG AIR|even foxes among the express wate|
+4069|125|8|6|3|3075.36|0.02|0.01|A|F|1992-05-24|1992-06-18|1992-06-12|COLLECT COD|MAIL|y final deposits wake furiously! slyl|
+4069|184|5|7|50|54209.00|0.00|0.01|R|F|1992-09-03|1992-06-14|1992-10-01|NONE|REG AIR|ages. carefully regular |
+4070|183|4|1|2|2166.36|0.09|0.08|N|O|1995-08-03|1995-09-10|1995-08-17|TAKE BACK RETURN|REG AIR|ptotes affix|
+4070|155|3|2|40|42206.00|0.07|0.07|N|O|1995-07-13|1995-07-23|1995-08-06|COLLECT COD|MAIL|about the sentiments. quick|
+4070|62|3|3|11|10582.66|0.00|0.08|N|O|1995-08-23|1995-08-15|1995-08-31|TAKE BACK RETURN|MAIL| carefully final pack|
+4070|29|4|4|46|42734.92|0.02|0.02|N|O|1995-06-22|1995-07-14|1995-07-11|DELIVER IN PERSON|REG AIR|nticing ideas. boldly|
+4071|112|2|1|22|22266.42|0.02|0.07|N|O|1996-10-31|1996-12-14|1996-11-05|NONE|REG AIR|sits cajole carefully final instructio|
+4071|18|8|2|47|43146.47|0.00|0.03|N|O|1996-11-04|1996-12-09|1996-11-16|NONE|TRUCK|ts cajole furiously along the|
+4096|27|10|1|31|28737.62|0.10|0.02|A|F|1992-07-14|1992-09-03|1992-07-31|COLLECT COD|TRUCK|y final, even platelets. boldly|
+4096|57|9|2|17|16269.85|0.07|0.03|R|F|1992-09-30|1992-08-11|1992-10-11|TAKE BACK RETURN|REG AIR|platelets alongside of the |
+4096|9|10|3|21|19089.00|0.08|0.00|A|F|1992-08-24|1992-09-04|1992-09-11|DELIVER IN PERSON|MAIL|tes mold flu|
+4096|128|3|4|20|20562.40|0.02|0.07|R|F|1992-08-24|1992-09-13|1992-08-28|DELIVER IN PERSON|TRUCK|sual requests. furiously bold packages wake|
+4097|74|5|1|50|48703.50|0.04|0.04|N|O|1996-08-31|1996-08-14|1996-09-27|DELIVER IN PERSON|MAIL|egular deposits. blithely pending|
+4097|74|4|2|46|44807.22|0.10|0.01|N|O|1996-07-29|1996-08-19|1996-08-25|COLLECT COD|AIR| even depend|
+4097|174|2|3|42|45115.14|0.06|0.06|N|O|1996-08-11|1996-07-30|1996-08-15|NONE|FOB|carefully silent foxes are against the |
+4098|200|1|1|46|50609.20|0.07|0.03|N|O|1997-01-26|1997-01-27|1997-02-13|TAKE BACK RETURN|SHIP|e slyly blithely silent deposits. fluff|
+4099|4|7|1|29|26216.00|0.09|0.07|R|F|1992-11-21|1992-11-04|1992-11-30|NONE|FOB| slowly final warthogs sleep blithely. q|
+4099|137|3|2|3|3111.39|0.04|0.06|A|F|1992-09-12|1992-10-18|1992-10-01|NONE|RAIL|. special packages sleep|
+4099|51|3|3|36|34237.80|0.06|0.06|R|F|1992-11-06|1992-09-28|1992-12-02|NONE|FOB|beans cajole slyly quickly ironic |
+4099|139|5|4|7|7273.91|0.05|0.02|A|F|1992-09-12|1992-11-13|1992-09-14|TAKE BACK RETURN|AIR|onic foxes. quickly final fox|
+4099|163|10|5|48|51031.68|0.00|0.02|R|F|1992-10-18|1992-10-14|1992-11-01|NONE|REG AIR|ts haggle according to the slyly f|
+4099|59|10|6|39|37402.95|0.07|0.02|R|F|1992-12-13|1992-11-13|1992-12-26|DELIVER IN PERSON|REG AIR|fluffy accounts impress pending, iro|
+4099|180|8|7|46|49688.28|0.06|0.07|R|F|1992-10-29|1992-11-03|1992-11-10|DELIVER IN PERSON|REG AIR|ages nag requests.|
+4100|74|5|1|4|3896.28|0.03|0.03|N|O|1996-06-20|1996-04-29|1996-06-21|TAKE BACK RETURN|FOB|lyly regular, bold requ|
+4101|115|2|1|22|22332.42|0.05|0.02|R|F|1994-02-02|1994-02-19|1994-02-12|COLLECT COD|AIR|ly express instructions. careful|
+4102|10|3|1|17|15470.17|0.02|0.02|N|O|1996-06-03|1996-05-06|1996-07-02|COLLECT COD|AIR|ly silent theodolites sleep unusual exc|
+4102|69|8|2|5|4845.30|0.08|0.02|N|O|1996-05-11|1996-05-11|1996-05-16|COLLECT COD|AIR| the furiously even|
+4102|67|4|3|39|37715.34|0.08|0.01|N|O|1996-04-14|1996-05-18|1996-04-20|DELIVER IN PERSON|AIR|ffix blithely slyly special |
+4102|140|6|4|39|40565.46|0.02|0.00|N|O|1996-06-15|1996-06-06|1996-06-30|DELIVER IN PERSON|SHIP|y among the furiously special|
+4102|1|6|5|32|28832.00|0.08|0.01|N|O|1996-05-14|1996-04-29|1996-05-29|NONE|RAIL| the even requests; regular pinto|
+4102|137|8|6|7|7259.91|0.02|0.01|N|O|1996-06-19|1996-05-21|1996-07-15|NONE|REG AIR|bove the carefully pending the|
+4103|75|4|1|40|39002.80|0.05|0.03|R|F|1992-09-19|1992-08-14|1992-09-21|COLLECT COD|RAIL|usly across the slyly busy accounts! fin|
+4128|196|8|1|5|5480.95|0.04|0.04|N|O|1995-10-18|1995-11-28|1995-10-28|TAKE BACK RETURN|FOB|ake permanently |
+4129|56|8|1|32|30593.60|0.03|0.04|A|F|1993-09-16|1993-08-25|1993-09-25|TAKE BACK RETURN|MAIL|ckages haggl|
+4129|27|6|2|39|36153.78|0.06|0.07|R|F|1993-10-21|1993-08-04|1993-10-29|COLLECT COD|MAIL|y regular foxes. slyly ironic deposits |
+4130|178|6|1|44|47439.48|0.07|0.04|N|O|1996-05-14|1996-04-15|1996-05-15|COLLECT COD|TRUCK|eaves haggle qui|
+4130|63|10|2|2|1926.12|0.05|0.06|N|O|1996-05-19|1996-04-24|1996-06-17|TAKE BACK RETURN|RAIL|uriously regular instructions around th|
+4131|50|7|1|6|5700.30|0.05|0.01|N|O|1998-04-27|1998-04-18|1998-04-29|TAKE BACK RETURN|MAIL|ns cajole slyly. even, iro|
+4131|178|8|2|32|34501.44|0.08|0.01|N|O|1998-03-02|1998-03-21|1998-03-07|TAKE BACK RETURN|TRUCK| furiously regular asymptotes nod sly|
+4131|26|9|3|25|23150.50|0.02|0.07|N|O|1998-02-24|1998-03-01|1998-02-27|TAKE BACK RETURN|FOB|uickly exp|
+4131|36|7|4|8|7488.24|0.04|0.01|N|O|1998-03-03|1998-03-15|1998-03-26|COLLECT COD|FOB| after the furiously ironic d|
+4131|125|6|5|30|30753.60|0.01|0.01|N|O|1998-04-01|1998-04-13|1998-04-08|TAKE BACK RETURN|FOB|he fluffily express depen|
+4131|102|7|6|47|47098.70|0.02|0.00|N|O|1998-03-09|1998-04-05|1998-03-13|TAKE BACK RETURN|RAIL|ges. ironic pinto be|
+4132|138|4|1|28|29067.64|0.07|0.03|N|O|1995-08-16|1995-08-01|1995-08-29|TAKE BACK RETURN|SHIP|pths wake against the stealthily special pi|
+4132|15|5|2|23|21045.23|0.07|0.07|N|O|1995-06-27|1995-07-27|1995-07-13|TAKE BACK RETURN|FOB|d deposits. fluffily even requests haggle b|
+4132|87|8|3|18|17767.44|0.09|0.04|A|F|1995-06-01|1995-08-01|1995-06-02|TAKE BACK RETURN|RAIL|y final de|
+4133|24|5|1|35|32340.70|0.02|0.00|A|F|1992-11-25|1992-09-15|1992-12-25|NONE|AIR|g above the quickly bold packages. ev|
+4134|121|4|1|34|34718.08|0.02|0.05|R|F|1995-04-29|1995-03-13|1995-05-11|DELIVER IN PERSON|FOB|e furiously regular sheaves sleep|
+4134|96|10|2|34|33867.06|0.01|0.03|A|F|1995-05-06|1995-03-28|1995-05-13|DELIVER IN PERSON|SHIP|ual asymptotes wake carefully alo|
+4134|171|9|3|12|12854.04|0.05|0.04|A|F|1995-03-19|1995-03-27|1995-04-14|COLLECT COD|TRUCK|kly above the quickly regular |
+4134|100|4|4|45|45004.50|0.08|0.02|A|F|1995-04-11|1995-03-27|1995-04-17|TAKE BACK RETURN|MAIL|ironic pin|
+4135|2|3|1|23|20746.00|0.06|0.01|N|O|1997-04-09|1997-05-12|1997-04-16|TAKE BACK RETURN|FOB|posits cajole furiously carefully|
+4135|120|1|2|32|32643.84|0.07|0.00|N|O|1997-03-14|1997-04-23|1997-04-12|TAKE BACK RETURN|TRUCK| ideas. requests use. furiously|
+4135|160|5|3|33|34985.28|0.05|0.05|N|O|1997-05-01|1997-05-23|1997-05-23|DELIVER IN PERSON|AIR|he fluffil|
+4135|195|6|4|13|14237.47|0.04|0.07|N|O|1997-03-16|1997-05-19|1997-04-03|COLLECT COD|RAIL|efully special account|
+4160|113|10|1|25|25327.75|0.10|0.04|N|O|1996-09-22|1996-10-17|1996-09-24|NONE|SHIP|ar accounts sleep blithe|
+4160|122|7|2|12|12265.44|0.00|0.03|N|O|1996-11-22|1996-09-25|1996-12-10|DELIVER IN PERSON|REG AIR|y bold package|
+4160|63|4|3|48|46226.88|0.04|0.04|N|O|1996-09-19|1996-11-02|1996-09-24|COLLECT COD|FOB| unusual dolphins |
+4161|122|7|1|12|12265.44|0.08|0.02|R|F|1993-08-25|1993-10-04|1993-09-22|COLLECT COD|RAIL|onic dolphins. in|
+4161|28|3|2|47|43616.94|0.05|0.00|A|F|1993-12-20|1993-10-29|1994-01-19|TAKE BACK RETURN|RAIL|r requests about the final, even foxes hag|
+4161|138|4|3|42|43601.46|0.03|0.04|R|F|1993-11-12|1993-10-04|1993-11-27|COLLECT COD|MAIL|thely across the even attainments. express|
+4161|10|5|4|45|40950.45|0.02|0.06|A|F|1993-10-22|1993-10-17|1993-10-30|COLLECT COD|REG AIR|about the ironic packages cajole blithe|
+4161|29|10|5|46|42734.92|0.05|0.01|A|F|1993-11-09|1993-11-17|1993-11-17|TAKE BACK RETURN|TRUCK|he stealthily ironic foxes. ideas haggl|
+4161|148|9|6|19|19914.66|0.07|0.00|R|F|1993-08-22|1993-11-11|1993-09-01|TAKE BACK RETURN|REG AIR|beans breach s|
+4162|74|3|1|45|43833.15|0.10|0.07|A|F|1992-03-21|1992-05-02|1992-03-29|DELIVER IN PERSON|AIR|elets. slyly regular i|
+4162|90|1|2|29|28712.61|0.00|0.05|R|F|1992-02-25|1992-04-25|1992-03-17|NONE|REG AIR|nding pinto beans haggle blithe|
+4163|33|4|1|13|12129.39|0.08|0.03|A|F|1993-02-17|1993-03-13|1993-03-15|DELIVER IN PERSON|REG AIR|phins wake. pending requests inte|
+4164|120|7|1|9|9181.08|0.07|0.02|N|O|1998-08-25|1998-08-13|1998-09-19|DELIVER IN PERSON|SHIP|re fluffily slyly bold requests. |
+4165|41|2|1|12|11292.48|0.00|0.01|N|O|1997-09-20|1997-10-20|1997-10-12|TAKE BACK RETURN|REG AIR|nwind slow theodolites. carefully pending |
+4166|141|10|1|8|8329.12|0.00|0.08|A|F|1993-06-05|1993-04-10|1993-07-05|COLLECT COD|MAIL|uickly. blithely pending de|
+4166|93|5|2|8|7944.72|0.06|0.04|A|F|1993-06-07|1993-04-17|1993-06-16|DELIVER IN PERSON|REG AIR|es along the furiously regular acc|
+4166|7|10|3|17|15419.00|0.02|0.06|R|F|1993-06-29|1993-05-15|1993-07-24|DELIVER IN PERSON|SHIP|ackages. re|
+4166|86|7|4|36|35498.88|0.06|0.05|R|F|1993-03-01|1993-05-25|1993-03-05|COLLECT COD|MAIL|unts. furiously express accounts w|
+4166|77|6|5|5|4885.35|0.08|0.01|A|F|1993-06-19|1993-04-24|1993-06-27|NONE|REG AIR|hely unusual packages are above the f|
+4166|102|5|6|6|6012.60|0.04|0.08|R|F|1993-04-30|1993-04-17|1993-05-08|DELIVER IN PERSON|MAIL|ily ironic deposits print furiously. iron|
+4166|24|5|7|26|24024.52|0.09|0.01|R|F|1993-03-17|1993-05-09|1993-03-25|NONE|MAIL|lar dependencies. s|
+4167|61|8|1|47|45169.82|0.04|0.02|N|O|1998-08-02|1998-08-24|1998-08-28|DELIVER IN PERSON|REG AIR| carefully final asymptotes. slyly bo|
+4167|87|8|2|17|16780.36|0.06|0.07|N|O|1998-09-18|1998-09-06|1998-10-07|COLLECT COD|REG AIR|ly around the even instr|
+4167|73|3|3|1|973.07|0.03|0.06|N|O|1998-10-11|1998-08-14|1998-10-13|COLLECT COD|TRUCK|xpress platelets. blithely |
+4192|11|1|1|36|32796.36|0.06|0.08|N|O|1998-04-25|1998-05-26|1998-05-03|COLLECT COD|TRUCK|eodolites sleep|
+4192|121|6|2|15|15316.80|0.04|0.08|N|O|1998-06-26|1998-05-26|1998-07-16|COLLECT COD|AIR|e slyly special grouches. express pinto b|
+4192|135|6|3|7|7245.91|0.06|0.03|N|O|1998-05-19|1998-07-08|1998-05-31|COLLECT COD|FOB|y; excuses use. ironic, close instru|
+4192|24|3|4|32|29568.64|0.09|0.04|N|O|1998-06-23|1998-06-25|1998-07-17|NONE|FOB|ounts are fluffily slyly bold req|
+4192|48|7|5|48|45505.92|0.08|0.01|N|O|1998-08-17|1998-07-11|1998-09-03|NONE|AIR|ests. quickly bol|
+4192|150|7|6|44|46206.60|0.10|0.02|N|O|1998-08-06|1998-07-09|1998-08-20|NONE|FOB|structions mai|
+4192|170|5|7|27|28894.59|0.02|0.00|N|O|1998-07-03|1998-06-26|1998-07-13|TAKE BACK RETURN|AIR| carefully even escapades. care|
+4193|131|7|1|37|38151.81|0.09|0.06|A|F|1994-04-25|1994-02-24|1994-05-08|NONE|AIR|er the quickly regular dependencies wake|
+4193|117|7|2|3|3051.33|0.09|0.05|R|F|1994-04-29|1994-03-20|1994-05-29|TAKE BACK RETURN|REG AIR|osits above the depo|
+4193|179|10|3|10|10791.70|0.06|0.03|A|F|1994-02-10|1994-03-22|1994-03-09|COLLECT COD|RAIL|uffily spe|
+4193|51|9|4|29|27580.45|0.09|0.05|A|F|1994-02-11|1994-03-11|1994-03-13|TAKE BACK RETURN|RAIL|ly. final packages use blit|
+4193|20|7|5|50|46001.00|0.01|0.01|R|F|1994-04-28|1994-03-23|1994-05-09|NONE|FOB| beans. regular accounts cajole. de|
+4193|66|1|6|21|20287.26|0.02|0.04|R|F|1994-04-26|1994-03-22|1994-05-23|DELIVER IN PERSON|TRUCK|accounts cajole b|
+4194|197|1|1|43|47179.17|0.08|0.06|A|F|1994-11-06|1994-12-09|1994-11-16|NONE|TRUCK|olites are after the exp|
+4194|47|10|2|18|17046.72|0.07|0.07|A|F|1995-02-14|1994-12-04|1995-03-11|TAKE BACK RETURN|TRUCK|ld packages. quickly eve|
+4195|6|9|1|14|12684.00|0.09|0.04|R|F|1993-09-06|1993-07-21|1993-09-18|DELIVER IN PERSON|REG AIR|ironic packages. carefully express|
+4195|66|1|2|22|21253.32|0.10|0.08|R|F|1993-07-01|1993-07-23|1993-07-28|COLLECT COD|RAIL|lly express pinto bea|
+4195|194|8|3|19|20789.61|0.01|0.06|R|F|1993-09-06|1993-08-13|1993-09-15|TAKE BACK RETURN|REG AIR|telets sleep even requests. final, even i|
+4196|156|4|1|30|31684.50|0.02|0.06|N|O|1998-08-09|1998-06-30|1998-09-05|COLLECT COD|SHIP|egular foxes us|
+4196|9|6|2|31|28179.00|0.09|0.08|N|O|1998-06-12|1998-07-28|1998-07-11|NONE|MAIL|ut the blithely ironic inst|
+4196|178|9|3|46|49595.82|0.05|0.00|N|O|1998-09-05|1998-06-28|1998-09-10|TAKE BACK RETURN|MAIL|according to t|
+4196|114|8|4|42|42592.62|0.04|0.06|N|O|1998-08-13|1998-07-18|1998-09-07|TAKE BACK RETURN|AIR| instructions. courts cajole slyly ev|
+4196|72|2|5|3|2916.21|0.01|0.03|N|O|1998-05-17|1998-07-21|1998-05-18|DELIVER IN PERSON|TRUCK| accounts. fu|
+4196|87|8|6|43|42444.44|0.01|0.06|N|O|1998-08-12|1998-07-12|1998-08-22|DELIVER IN PERSON|FOB|es. slyly even |
+4196|4|1|7|3|2712.00|0.00|0.06|N|O|1998-08-05|1998-07-28|1998-08-15|DELIVER IN PERSON|REG AIR|y regular packages haggle furiously alongs|
+4197|129|8|1|50|51456.00|0.06|0.03|N|O|1996-11-15|1996-11-01|1996-11-20|NONE|FOB|. carefully bold asymptotes nag blithe|
+4197|70|9|2|39|37832.73|0.02|0.08|N|O|1996-10-07|1996-10-11|1996-10-18|DELIVER IN PERSON|RAIL|ronic requests. quickly bold packages in|
+4197|32|8|3|28|26096.84|0.06|0.02|N|O|1996-10-05|1996-10-24|1996-10-22|TAKE BACK RETURN|AIR|regular pin|
+4197|96|7|4|23|22910.07|0.00|0.03|N|O|1996-09-10|1996-10-10|1996-09-25|NONE|AIR|l instructions print slyly past the reg|
+4197|121|6|5|37|37781.44|0.03|0.04|N|O|1996-10-20|1996-10-10|1996-11-10|COLLECT COD|TRUCK|carefully enticing decoys boo|
+4197|31|7|6|48|44689.44|0.08|0.00|N|O|1996-10-07|1996-10-25|1996-10-23|COLLECT COD|REG AIR| final instructions. blithe, spe|
+4198|146|9|1|48|50214.72|0.09|0.05|N|O|1997-09-03|1997-07-18|1997-09-11|NONE|REG AIR|cajole carefully final, ironic ide|
+4198|143|6|2|46|47984.44|0.09|0.01|N|O|1997-08-17|1997-09-08|1997-09-11|COLLECT COD|TRUCK|posits among th|
+4198|145|4|3|13|13586.82|0.03|0.04|N|O|1997-07-18|1997-07-24|1997-08-10|NONE|REG AIR| furious excuses. bli|
+4199|70|5|1|16|15521.12|0.10|0.00|A|F|1992-06-11|1992-04-10|1992-07-10|COLLECT COD|TRUCK|ncies. furiously special accounts|
+4199|9|10|2|18|16362.00|0.00|0.01|A|F|1992-06-01|1992-03-30|1992-06-28|DELIVER IN PERSON|RAIL|pending, regular accounts. carefully|
+4224|199|10|1|27|29678.13|0.05|0.03|N|O|1997-09-05|1997-08-19|1997-09-30|NONE|SHIP|ly special deposits sleep qui|
+4224|37|3|2|20|18740.60|0.07|0.05|N|O|1997-11-09|1997-08-23|1997-11-14|NONE|FOB|unts promise across the requests. blith|
+4224|24|7|3|4|3696.08|0.08|0.05|N|O|1997-09-07|1997-09-05|1997-09-25|TAKE BACK RETURN|FOB| even dinos. carefull|
+4224|160|2|4|50|53008.00|0.10|0.06|N|O|1997-07-30|1997-09-10|1997-08-19|COLLECT COD|RAIL|side of the carefully silent dep|
+4224|85|6|5|48|47283.84|0.00|0.04|N|O|1997-10-03|1997-08-31|1997-10-10|NONE|RAIL| final, regular asymptotes use alway|
+4225|49|8|1|25|23726.00|0.08|0.04|N|O|1997-07-10|1997-08-08|1997-07-31|TAKE BACK RETURN|TRUCK|se fluffily. busily ironic requests are;|
+4225|96|8|2|23|22910.07|0.02|0.04|N|O|1997-09-18|1997-08-31|1997-10-11|TAKE BACK RETURN|RAIL|. quickly b|
+4225|98|10|3|28|27946.52|0.08|0.03|N|O|1997-07-11|1997-09-01|1997-08-03|DELIVER IN PERSON|FOB|ts are requests. even, bold depos|
+4226|188|9|1|27|29380.86|0.06|0.08|A|F|1993-05-03|1993-04-12|1993-05-16|COLLECT COD|AIR|sly alongside of the slyly ironic pac|
+4227|158|6|1|19|20104.85|0.01|0.08|A|F|1995-05-05|1995-05-03|1995-05-22|COLLECT COD|REG AIR|ns sleep along the blithely even theodolit|
+4227|33|4|2|8|7464.24|0.09|0.00|N|F|1995-06-11|1995-04-30|1995-06-28|COLLECT COD|REG AIR| packages since the bold, u|
+4227|75|6|3|11|10725.77|0.10|0.04|A|F|1995-03-30|1995-05-02|1995-04-26|DELIVER IN PERSON|SHIP|l requests-- bold requests cajole dogg|
+4227|200|4|4|2|2200.40|0.02|0.05|R|F|1995-04-24|1995-05-09|1995-05-21|DELIVER IN PERSON|AIR|ep. specia|
+4227|147|6|5|49|51309.86|0.05|0.06|R|F|1995-05-19|1995-04-12|1995-06-12|TAKE BACK RETURN|REG AIR|ts sleep blithely carefully unusual ideas.|
+4228|141|10|1|20|20822.80|0.00|0.06|N|O|1997-04-24|1997-05-29|1997-05-17|NONE|RAIL|f the slyly fluffy pinto beans are|
+4229|96|9|1|44|43827.96|0.02|0.05|N|O|1998-05-29|1998-05-12|1998-06-16|DELIVER IN PERSON|AIR|s. carefully e|
+4229|5|8|2|34|30770.00|0.07|0.05|N|O|1998-05-26|1998-04-13|1998-06-08|DELIVER IN PERSON|MAIL|thely final accounts use even packa|
+4230|46|5|1|38|35949.52|0.10|0.03|A|F|1992-04-28|1992-04-21|1992-05-28|TAKE BACK RETURN|FOB|ly regular packages. regular ideas boost|
+4230|199|3|2|43|47265.17|0.02|0.08|R|F|1992-03-14|1992-05-13|1992-03-28|NONE|FOB|ses lose blithely slyly final e|
+4230|196|9|3|10|10961.90|0.06|0.02|A|F|1992-06-11|1992-04-11|1992-07-02|TAKE BACK RETURN|MAIL|ar packages are |
+4230|75|6|4|28|27301.96|0.01|0.03|R|F|1992-05-12|1992-05-10|1992-06-01|TAKE BACK RETURN|MAIL|nt instruct|
+4230|125|10|5|50|51256.00|0.00|0.01|A|F|1992-03-29|1992-05-19|1992-04-20|TAKE BACK RETURN|SHIP|ts. final instructions in|
+4230|35|6|6|30|28050.90|0.05|0.07|A|F|1992-03-11|1992-04-29|1992-03-30|NONE|AIR|s. final excuses across the|
+4230|152|3|7|18|18938.70|0.10|0.04|R|F|1992-06-23|1992-05-10|1992-07-04|COLLECT COD|SHIP| the final acco|
+4231|142|3|1|47|48980.58|0.09|0.03|N|O|1997-11-27|1998-01-26|1997-12-17|NONE|REG AIR|hely along the silent at|
+4231|166|3|2|4|4264.64|0.06|0.02|N|O|1997-11-28|1998-01-26|1997-12-12|TAKE BACK RETURN|MAIL|lithely even packages. |
+4231|121|2|3|31|31654.72|0.07|0.08|N|O|1998-02-14|1997-12-27|1998-03-01|DELIVER IN PERSON|FOB|ublate. theodoli|
+4231|40|1|4|35|32901.40|0.10|0.00|N|O|1998-02-21|1998-01-24|1998-03-18|DELIVER IN PERSON|FOB|le quickly regular, unus|
+4256|151|9|1|22|23125.30|0.05|0.05|R|F|1992-07-30|1992-05-14|1992-08-14|NONE|TRUCK|, final platelets are slyly final pint|
+4257|65|10|1|3|2895.18|0.10|0.03|N|O|1995-06-18|1995-05-01|1995-07-12|DELIVER IN PERSON|MAIL|thin the theodolites use after the bl|
+4257|35|6|2|5|4675.15|0.01|0.04|R|F|1995-04-29|1995-06-05|1995-05-13|TAKE BACK RETURN|TRUCK|n deposits. furiously e|
+4257|128|9|3|33|33927.96|0.03|0.04|A|F|1995-05-23|1995-05-03|1995-05-31|COLLECT COD|AIR|uffily regular accounts ar|
+4258|166|7|1|36|38381.76|0.02|0.06|N|O|1997-02-23|1997-01-25|1997-02-27|TAKE BACK RETURN|SHIP|ns use alongs|
+4258|162|1|2|19|20181.04|0.03|0.02|N|O|1997-01-14|1996-12-12|1997-01-20|TAKE BACK RETURN|AIR|ly busily ironic foxes. f|
+4258|31|7|3|46|42827.38|0.04|0.07|N|O|1997-01-02|1996-12-26|1997-01-12|DELIVER IN PERSON|AIR| furiously pend|
+4258|35|6|4|22|20570.66|0.04|0.04|N|O|1996-12-12|1996-12-06|1996-12-20|TAKE BACK RETURN|AIR|e regular, even asym|
+4258|163|10|5|9|9568.44|0.04|0.03|N|O|1996-12-04|1996-12-08|1996-12-20|DELIVER IN PERSON|TRUCK|counts wake permanently after the bravely|
+4259|43|6|1|14|13202.56|0.05|0.03|N|O|1998-01-09|1997-11-21|1998-01-29|TAKE BACK RETURN|RAIL| furiously pending excuses. ideas hagg|
+4260|24|7|1|21|19404.42|0.08|0.04|R|F|1992-08-06|1992-06-18|1992-08-22|NONE|AIR|al, pending accounts must|
+4261|110|1|1|12|12121.32|0.05|0.01|A|F|1992-11-01|1993-01-01|1992-11-12|NONE|FOB|into beans |
+4261|82|3|2|4|3928.32|0.02|0.07|R|F|1992-12-11|1992-12-18|1992-12-24|DELIVER IN PERSON|FOB|ackages unwind furiously fluff|
+4261|175|5|3|3|3225.51|0.07|0.02|R|F|1992-11-10|1992-12-14|1992-11-17|COLLECT COD|RAIL|ly even deposits eat blithely alo|
+4261|174|3|4|36|38670.12|0.04|0.06|R|F|1992-12-02|1992-12-18|1992-12-25|NONE|REG AIR| slyly pendi|
+4261|24|7|5|28|25872.56|0.07|0.06|A|F|1992-10-08|1992-12-23|1992-10-11|TAKE BACK RETURN|MAIL|packages. fluffily i|
+4262|76|7|1|30|29282.10|0.01|0.03|N|O|1996-08-11|1996-10-11|1996-09-09|TAKE BACK RETURN|RAIL|tes after the carefully|
+4262|96|7|2|5|4980.45|0.02|0.05|N|O|1996-09-27|1996-09-05|1996-10-25|COLLECT COD|SHIP|blithely final asymptotes integrate|
+4262|162|1|3|5|5310.80|0.08|0.00|N|O|1996-10-02|1996-10-16|1996-10-05|NONE|REG AIR|ironic accounts are unusu|
+4262|74|2|4|45|43833.15|0.02|0.01|N|O|1996-11-09|1996-09-09|1996-11-12|DELIVER IN PERSON|SHIP|ackages boost. pending, even instruction|
+4262|100|3|5|28|28002.80|0.06|0.02|N|O|1996-10-22|1996-09-06|1996-11-13|DELIVER IN PERSON|FOB|ironic, regular depend|
+4262|17|7|6|26|23842.26|0.03|0.02|N|O|1996-08-29|1996-09-25|1996-08-31|NONE|RAIL|s boost slyly along the bold, iro|
+4262|160|5|7|41|43466.56|0.03|0.01|N|O|1996-08-28|1996-09-14|1996-09-20|COLLECT COD|RAIL|cuses unwind ac|
+4263|18|9|1|9|8262.09|0.08|0.07|N|O|1998-04-04|1998-04-29|1998-05-04|COLLECT COD|AIR|structions cajole quic|
+4263|196|10|2|28|30693.32|0.05|0.03|N|O|1998-06-24|1998-06-08|1998-07-14|NONE|MAIL|ideas for the carefully re|
+4263|11|1|3|38|34618.38|0.01|0.01|N|O|1998-07-10|1998-05-08|1998-07-17|NONE|TRUCK|rding to the dep|
+4263|19|3|4|20|18380.20|0.02|0.07|N|O|1998-04-09|1998-04-30|1998-05-04|NONE|RAIL|uietly regular deposits. sly deposits w|
+4263|198|2|5|14|15374.66|0.09|0.06|N|O|1998-05-06|1998-04-17|1998-05-11|DELIVER IN PERSON|TRUCK|d accounts. daringly regular accounts hagg|
+4263|113|10|6|47|47616.17|0.08|0.06|N|O|1998-06-28|1998-05-09|1998-07-02|DELIVER IN PERSON|TRUCK|y. theodolites wake idly ironic do|
+4263|29|4|7|6|5574.12|0.04|0.04|N|O|1998-05-01|1998-06-02|1998-05-14|TAKE BACK RETURN|REG AIR|g the final, regular instructions: |
+4288|74|5|1|32|31170.24|0.10|0.07|R|F|1993-03-19|1993-01-26|1993-04-18|TAKE BACK RETURN|AIR|e blithely even instructions. speci|
+4288|105|6|2|39|39198.90|0.05|0.02|R|F|1993-03-25|1993-02-06|1993-03-28|DELIVER IN PERSON|AIR|uffy theodolites run|
+4288|125|8|3|7|7175.84|0.03|0.01|A|F|1993-01-15|1993-02-05|1993-01-26|NONE|TRUCK|ngside of the special platelet|
+4289|196|7|1|19|20827.61|0.06|0.06|R|F|1993-12-31|1993-11-06|1994-01-23|DELIVER IN PERSON|TRUCK|e carefully regular ideas. sl|
+4290|137|3|1|23|23853.99|0.06|0.04|R|F|1995-04-04|1995-02-16|1995-04-07|TAKE BACK RETURN|REG AIR|uests cajole carefully.|
+4290|99|2|2|3|2997.27|0.09|0.03|A|F|1995-03-25|1995-03-07|1995-04-11|NONE|RAIL|lar platelets cajole|
+4291|192|6|1|3|3276.57|0.08|0.08|A|F|1994-03-17|1994-02-21|1994-03-27|COLLECT COD|SHIP|tes sleep slyly above the quickly sl|
+4291|125|8|2|43|44080.16|0.01|0.06|A|F|1994-02-01|1994-02-27|1994-02-06|DELIVER IN PERSON|REG AIR|s. quietly regular |
+4291|8|1|3|25|22700.00|0.09|0.08|R|F|1994-02-14|1994-02-08|1994-03-15|COLLECT COD|AIR|uctions. furiously regular ins|
+4292|44|3|1|22|20768.88|0.08|0.03|R|F|1992-02-14|1992-02-16|1992-03-01|NONE|FOB|refully expres|
+4292|40|6|2|1|940.04|0.03|0.01|A|F|1992-02-07|1992-03-16|1992-02-10|DELIVER IN PERSON|FOB| the furiously ev|
+4292|120|10|3|35|35704.20|0.03|0.06|A|F|1992-03-23|1992-04-04|1992-04-02|COLLECT COD|TRUCK|dugouts use. furiously bold packag|
+4292|163|10|4|40|42526.40|0.05|0.04|A|F|1992-04-27|1992-03-07|1992-05-04|COLLECT COD|REG AIR|ounts according to the furiously |
+4292|131|7|5|6|6186.78|0.07|0.08|R|F|1992-03-03|1992-02-24|1992-03-25|COLLECT COD|FOB|bove the silently regula|
+4292|4|1|6|47|42488.00|0.05|0.00|R|F|1992-05-02|1992-03-21|1992-05-27|TAKE BACK RETURN|FOB|y packages; even ideas boost|
+4293|1|6|1|34|30634.00|0.03|0.08|N|O|1996-11-05|1996-10-12|1996-12-04|NONE|FOB|ions sleep blithely on|
+4293|77|5|2|50|48853.50|0.01|0.05|N|O|1996-11-27|1996-10-30|1996-12-22|COLLECT COD|MAIL| special deposits. furiousl|
+4293|199|1|3|47|51661.93|0.08|0.02|N|O|1996-09-07|1996-10-24|1996-09-15|NONE|RAIL|ithely pending deposits af|
+4293|88|9|4|25|24702.00|0.04|0.04|N|O|1996-09-11|1996-11-14|1996-09-22|DELIVER IN PERSON|FOB|inal asympt|
+4293|181|2|5|1|1081.18|0.06|0.05|N|O|1996-11-15|1996-10-09|1996-11-26|COLLECT COD|AIR|eposits should boost along the |
+4293|79|7|6|45|44058.15|0.10|0.04|N|O|1996-11-04|1996-11-06|1996-11-23|NONE|MAIL|lar ideas use carefully|
+4294|105|8|1|19|19096.90|0.03|0.04|A|F|1992-10-16|1992-11-13|1992-10-26|DELIVER IN PERSON|AIR|nt dependencies. furiously regular ideas d|
+4294|27|2|2|16|14832.32|0.01|0.02|R|F|1992-08-17|1992-09-24|1992-09-04|TAKE BACK RETURN|REG AIR|lithely pint|
+4294|198|1|3|30|32945.70|0.01|0.00|A|F|1992-09-12|1992-11-06|1992-09-25|NONE|MAIL|olites. bold foxes affix ironic theodolite|
+4294|105|2|4|34|34173.40|0.02|0.01|R|F|1992-09-09|1992-11-06|1992-10-04|TAKE BACK RETURN|REG AIR|pendencies!|
+4294|119|3|5|37|37707.07|0.05|0.01|R|F|1992-09-07|1992-10-13|1992-09-08|NONE|REG AIR|cial packages nag f|
+4294|87|8|6|42|41457.36|0.02|0.03|A|F|1992-09-30|1992-11-13|1992-10-15|DELIVER IN PERSON|FOB| carefully; furiously ex|
+4294|175|3|7|47|50532.99|0.02|0.08|R|F|1992-11-09|1992-11-03|1992-12-05|TAKE BACK RETURN|SHIP|es. blithely r|
+4295|29|2|1|49|45521.98|0.09|0.01|N|O|1996-05-25|1996-03-17|1996-06-19|TAKE BACK RETURN|REG AIR|refully silent requests. f|
+4295|71|9|2|4|3884.28|0.09|0.07|N|O|1996-06-05|1996-04-26|1996-06-13|DELIVER IN PERSON|TRUCK|arefully according to the pending ac|
+4295|193|4|3|3|3279.57|0.04|0.00|N|O|1996-06-04|1996-04-24|1996-06-24|DELIVER IN PERSON|AIR|telets cajole bravely|
+4295|80|9|4|30|29402.40|0.07|0.06|N|O|1996-03-22|1996-04-23|1996-04-20|NONE|SHIP|yly ironic frets. pending foxes after |
+4320|46|5|1|28|26489.12|0.02|0.06|N|O|1997-01-28|1997-02-07|1997-02-07|COLLECT COD|FOB|nts. even, ironic excuses hagg|
+4320|140|6|2|6|6240.84|0.08|0.08|N|O|1997-01-11|1997-01-26|1997-01-22|DELIVER IN PERSON|SHIP|against the carefully careful asym|
+4320|188|9|3|33|35909.94|0.09|0.02|N|O|1996-12-11|1997-02-27|1997-01-08|TAKE BACK RETURN|SHIP|ess asymptotes so|
+4321|147|6|1|33|34555.62|0.09|0.02|A|F|1994-09-01|1994-08-17|1994-09-05|DELIVER IN PERSON|TRUCK|yly special excuses. fluffily |
+4321|54|2|2|45|42932.25|0.00|0.08|R|F|1994-11-13|1994-09-15|1994-11-18|DELIVER IN PERSON|SHIP| haggle ironically bold theodolites. quick|
+4321|186|7|3|23|24982.14|0.01|0.05|A|F|1994-11-03|1994-10-08|1994-11-06|DELIVER IN PERSON|SHIP|ly even orbits slee|
+4321|91|2|4|4|3964.36|0.02|0.00|R|F|1994-09-10|1994-10-06|1994-09-11|NONE|FOB|ironic deposi|
+4321|172|2|5|10|10721.70|0.04|0.03|A|F|1994-09-07|1994-08-23|1994-09-17|TAKE BACK RETURN|SHIP|wake carefully alongside of |
+4322|69|4|1|39|37793.34|0.04|0.02|N|O|1998-04-27|1998-06-03|1998-05-04|TAKE BACK RETURN|MAIL|its integrate fluffily |
+4322|140|1|2|9|9361.26|0.05|0.08|N|O|1998-05-18|1998-04-27|1998-05-28|COLLECT COD|AIR|ual instructio|
+4322|8|9|3|12|10896.00|0.09|0.05|N|O|1998-03-29|1998-06-05|1998-04-16|DELIVER IN PERSON|TRUCK|e blithely against the slyly unusu|
+4322|46|7|4|17|16082.68|0.09|0.08|N|O|1998-05-31|1998-05-31|1998-06-10|TAKE BACK RETURN|FOB|ructions boost |
+4322|102|7|5|10|10021.00|0.00|0.05|N|O|1998-05-31|1998-04-27|1998-06-25|TAKE BACK RETURN|REG AIR| regular ideas engage carefully quick|
+4322|60|8|6|39|37442.34|0.09|0.08|N|O|1998-03-16|1998-05-21|1998-04-11|COLLECT COD|AIR|ccounts. dogged pin|
+4322|14|4|7|34|31076.34|0.05|0.00|N|O|1998-05-27|1998-04-12|1998-06-16|NONE|REG AIR|ounts haggle fluffily ideas. pend|
+4323|1|2|1|33|29733.00|0.09|0.02|A|F|1994-05-04|1994-03-06|1994-05-23|COLLECT COD|TRUCK|the slyly bold deposits slee|
+4324|51|2|1|44|41846.20|0.05|0.04|N|O|1995-10-15|1995-09-07|1995-11-07|DELIVER IN PERSON|AIR|ainst the u|
+4324|48|7|2|12|11376.48|0.04|0.02|N|O|1995-10-05|1995-09-07|1995-10-18|NONE|REG AIR|c packages. furiously express sauternes|
+4324|82|3|3|14|13749.12|0.07|0.06|N|O|1995-11-12|1995-08-26|1995-11-21|COLLECT COD|AIR| packages nag express excuses. qui|
+4324|50|7|4|14|13300.70|0.02|0.04|N|O|1995-09-20|1995-10-08|1995-10-06|COLLECT COD|RAIL| express ideas. blithely blit|
+4324|84|5|5|22|21649.76|0.07|0.03|N|O|1995-09-13|1995-10-04|1995-09-23|DELIVER IN PERSON|SHIP|ke express, special ideas.|
+4324|43|2|6|31|29234.24|0.08|0.04|N|O|1995-10-23|1995-09-14|1995-11-09|COLLECT COD|RAIL|efully flu|
+4324|154|6|7|46|48490.90|0.00|0.03|N|O|1995-11-03|1995-09-28|1995-11-22|NONE|SHIP|ular, final theodo|
+4325|160|2|1|18|19082.88|0.01|0.07|N|O|1996-10-07|1996-09-28|1996-10-31|DELIVER IN PERSON|RAIL|. blithely|
+4326|163|4|1|11|11694.76|0.01|0.01|N|O|1997-02-02|1996-12-10|1997-02-20|DELIVER IN PERSON|TRUCK|press reque|
+4326|167|6|2|27|28813.32|0.06|0.01|N|O|1996-11-29|1997-01-20|1996-12-23|COLLECT COD|AIR|inal packages. final asymptotes about t|
+4327|95|8|1|18|17911.62|0.08|0.00|N|F|1995-06-16|1995-04-20|1995-07-12|COLLECT COD|RAIL|y final excuses. ironic, special requests a|
+4327|106|9|2|40|40244.00|0.07|0.01|N|F|1995-05-26|1995-04-17|1995-06-18|NONE|AIR|quests. packages are after th|
+4327|145|2|3|11|11496.54|0.10|0.07|R|F|1995-04-24|1995-05-27|1995-05-24|TAKE BACK RETURN|FOB| ironic dolphins|
+4327|21|10|4|8|7368.16|0.04|0.08|N|F|1995-05-26|1995-05-28|1995-06-19|DELIVER IN PERSON|AIR|eodolites cajole; unusual Tiresias|
+4327|190|1|5|39|42517.41|0.01|0.00|N|O|1995-06-23|1995-04-18|1995-07-13|TAKE BACK RETURN|FOB|kages against the blit|
+4327|152|4|6|10|10521.50|0.00|0.06|A|F|1995-04-28|1995-06-11|1995-05-07|TAKE BACK RETURN|TRUCK|arefully sile|
+4352|106|9|1|18|18109.80|0.00|0.03|N|O|1998-02-27|1998-02-02|1998-03-01|DELIVER IN PERSON|RAIL|ding to th|
+4353|94|8|1|22|21869.98|0.05|0.05|N|O|1998-01-19|1998-01-23|1998-02-10|COLLECT COD|FOB|ent packages. accounts are slyly. |
+4354|15|9|1|30|27450.30|0.08|0.07|R|F|1995-01-27|1994-11-24|1995-02-25|TAKE BACK RETURN|REG AIR|around the ir|
+4354|153|8|2|23|24222.45|0.01|0.08|R|F|1994-11-20|1994-12-23|1994-11-27|TAKE BACK RETURN|AIR|kly along the ironic, ent|
+4354|51|6|3|2|1902.10|0.10|0.04|A|F|1995-01-09|1994-12-15|1995-01-24|TAKE BACK RETURN|REG AIR|s nag quickly |
+4354|86|7|4|36|35498.88|0.05|0.05|A|F|1994-11-20|1994-12-06|1994-12-06|DELIVER IN PERSON|AIR| wake slyly eve|
+4354|65|10|5|37|35707.22|0.06|0.02|R|F|1995-01-13|1994-12-29|1995-01-31|DELIVER IN PERSON|FOB|deas use blithely! special foxes print af|
+4354|108|3|6|36|36291.60|0.03|0.04|R|F|1994-12-03|1994-12-05|1995-01-02|TAKE BACK RETURN|TRUCK|efully special packages use fluffily|
+4354|139|5|7|18|18704.34|0.03|0.04|A|F|1994-12-07|1994-12-11|1994-12-11|TAKE BACK RETURN|SHIP|ross the furiously |
+4355|195|7|1|32|35046.08|0.10|0.02|N|O|1996-12-29|1997-02-08|1997-01-24|DELIVER IN PERSON|REG AIR|y silent deposits. b|
+4355|17|1|2|4|3668.04|0.05|0.02|N|O|1997-02-25|1997-01-29|1997-03-17|TAKE BACK RETURN|TRUCK|slyly blithely regular packag|
+4355|1|2|3|13|11713.00|0.07|0.05|N|O|1997-01-21|1996-12-22|1997-02-14|COLLECT COD|TRUCK| ought to mold. blithely pending ideas |
+4355|194|6|4|14|15318.66|0.04|0.02|N|O|1997-03-08|1997-01-22|1997-03-26|NONE|RAIL|he furiously ironic accounts. quickly iro|
+4355|31|7|5|50|46551.50|0.10|0.00|N|O|1996-11-25|1997-01-01|1996-12-06|DELIVER IN PERSON|REG AIR| regular accounts boost along the |
+4355|122|7|6|35|35774.20|0.00|0.08|N|O|1997-01-28|1997-01-28|1997-02-20|NONE|FOB|ess accounts affix ironic|
+4355|101|4|7|47|47051.70|0.09|0.02|N|O|1996-12-28|1996-12-29|1997-01-09|NONE|RAIL|e. realms integrate |
+4356|194|5|1|35|38296.65|0.00|0.04|R|F|1994-05-30|1994-06-14|1994-06-08|COLLECT COD|MAIL|arefully ironic |
+4357|84|5|1|50|49204.00|0.04|0.07|N|O|1997-11-25|1997-12-03|1997-12-17|DELIVER IN PERSON|RAIL|s. final, e|
+4357|108|9|2|17|17137.70|0.10|0.07|N|O|1998-02-01|1997-12-08|1998-02-09|DELIVER IN PERSON|MAIL|e carefully furiou|
+4358|126|5|1|47|48227.64|0.04|0.00|N|O|1997-10-15|1997-10-14|1997-11-04|DELIVER IN PERSON|SHIP|refully busy dep|
+4359|174|3|1|41|44040.97|0.03|0.07|A|F|1993-04-06|1993-05-06|1993-04-14|COLLECT COD|RAIL|s affix sly|
+4359|153|8|2|8|8425.20|0.03|0.08|R|F|1993-06-27|1993-05-16|1993-07-04|DELIVER IN PERSON|MAIL|packages affix. fluffily regular f|
+4359|193|6|3|32|34982.08|0.10|0.03|R|F|1993-06-18|1993-04-04|1993-07-18|COLLECT COD|MAIL|olites nag quietly caref|
+4359|78|8|4|1|978.07|0.05|0.03|R|F|1993-04-27|1993-05-09|1993-05-08|NONE|MAIL| fluffily ironic, bold pac|
+4359|33|4|5|22|20526.66|0.04|0.01|A|F|1993-03-28|1993-06-01|1993-04-13|NONE|REG AIR|accounts wake ironic deposits. ironic|
+4384|136|7|1|5|5180.65|0.09|0.01|A|F|1992-08-22|1992-08-24|1992-09-20|DELIVER IN PERSON|MAIL|instructions sleep. blithely express pa|
+4384|89|10|2|38|37585.04|0.07|0.06|A|F|1992-10-18|1992-09-24|1992-11-04|NONE|FOB|ly final requests. regu|
+4384|89|10|3|11|10879.88|0.05|0.04|R|F|1992-08-31|1992-10-04|1992-09-28|TAKE BACK RETURN|FOB|deposits promise carefully even, regular e|
+4385|111|8|1|38|38422.18|0.00|0.02|N|O|1996-11-22|1996-10-30|1996-12-21|DELIVER IN PERSON|TRUCK|inal frays. final, bold exc|
+4386|130|3|1|10|10301.30|0.05|0.07|N|O|1998-06-03|1998-04-16|1998-06-28|TAKE BACK RETURN|MAIL|gainst the quickly expre|
+4386|118|2|2|28|28507.08|0.03|0.06|N|O|1998-03-19|1998-05-01|1998-03-27|NONE|FOB|. quick packages play slyly |
+4386|140|1|3|4|4160.56|0.07|0.05|N|O|1998-04-07|1998-03-25|1998-04-19|COLLECT COD|FOB|ns wake carefully carefully iron|
+4386|121|2|4|21|21443.52|0.09|0.00|N|O|1998-05-05|1998-03-19|1998-05-13|NONE|RAIL|e pending, sp|
+4386|130|3|5|39|40175.07|0.09|0.06|N|O|1998-03-05|1998-03-15|1998-03-16|NONE|RAIL|structions cajole quickly express|
+4386|90|1|6|18|17821.62|0.02|0.05|N|O|1998-04-12|1998-04-09|1998-05-12|TAKE BACK RETURN|SHIP| deposits use according to the pending, |
+4386|20|4|7|16|14720.32|0.07|0.02|N|O|1998-05-05|1998-03-17|1998-06-03|COLLECT COD|AIR|e furiously final pint|
+4387|122|5|1|3|3066.36|0.02|0.01|N|O|1996-01-17|1996-01-14|1996-01-28|COLLECT COD|AIR| boost slyly ironic instructions. furiou|
+4387|177|5|2|48|51704.16|0.06|0.05|N|O|1995-10-29|1995-12-11|1995-11-01|NONE|REG AIR|sleep slyly. blithely sl|
+4387|2|5|3|15|13530.00|0.00|0.03|N|O|1996-01-11|1996-01-14|1996-01-30|TAKE BACK RETURN|REG AIR|s hinder quietly across the pla|
+4387|47|8|4|9|8523.36|0.00|0.03|N|O|1996-01-04|1995-12-26|1996-01-12|DELIVER IN PERSON|REG AIR|c ideas. slyly regular packages sol|
+4387|82|3|5|3|2946.24|0.05|0.08|N|O|1995-11-17|1995-12-28|1995-11-25|COLLECT COD|SHIP| pinto beans |
+4387|6|3|6|40|36240.00|0.02|0.04|N|O|1995-11-29|1995-12-10|1995-12-20|NONE|REG AIR|deas according to the blithely regular fox|
+4388|65|10|1|30|28951.80|0.02|0.07|N|O|1996-06-07|1996-05-07|1996-06-22|DELIVER IN PERSON|FOB|s cajole fluffil|
+4388|84|5|2|28|27554.24|0.05|0.04|N|O|1996-05-08|1996-06-20|1996-05-12|TAKE BACK RETURN|RAIL|ove the ide|
+4388|52|4|3|13|12376.65|0.07|0.05|N|O|1996-06-28|1996-05-23|1996-07-04|DELIVER IN PERSON|REG AIR|ly even, expre|
+4389|157|2|1|20|21143.00|0.08|0.00|A|F|1994-06-06|1994-06-17|1994-06-17|DELIVER IN PERSON|SHIP|ng the carefully express d|
+4389|153|5|2|13|13690.95|0.00|0.00|A|F|1994-08-18|1994-06-06|1994-08-20|NONE|RAIL|nal, regula|
+4389|79|9|3|39|38183.73|0.04|0.07|A|F|1994-06-08|1994-06-04|1994-06-10|TAKE BACK RETURN|TRUCK| unusual, final excuses cajole carefully |
+4389|160|2|4|5|5300.80|0.09|0.00|A|F|1994-09-03|1994-06-23|1994-09-16|NONE|FOB| ironic request|
+4389|11|5|5|22|20042.22|0.08|0.00|R|F|1994-07-05|1994-06-12|1994-07-12|NONE|TRUCK|lly silent de|
+4389|2|3|6|22|19844.00|0.01|0.04|R|F|1994-06-07|1994-06-29|1994-06-19|COLLECT COD|TRUCK|at the final excuses hinder carefully a|
+4389|185|6|7|4|4340.72|0.09|0.08|R|F|1994-06-14|1994-06-30|1994-07-06|NONE|REG AIR| blithely even d|
+4390|152|10|1|35|36825.25|0.07|0.04|R|F|1995-05-30|1995-07-02|1995-06-15|DELIVER IN PERSON|TRUCK|ongside of the slyly regular ideas|
+4390|196|8|2|28|30693.32|0.03|0.00|N|O|1995-09-07|1995-06-22|1995-10-05|COLLECT COD|SHIP|ld braids haggle atop the for|
+4390|101|8|3|42|42046.20|0.05|0.08|A|F|1995-06-12|1995-07-16|1995-06-17|NONE|AIR|arefully even accoun|
+4390|98|2|4|32|31938.88|0.07|0.08|N|O|1995-09-15|1995-08-12|1995-10-05|TAKE BACK RETURN|TRUCK|ctions across|
+4391|161|10|1|1|1061.16|0.09|0.00|R|F|1992-06-18|1992-04-27|1992-06-20|COLLECT COD|TRUCK|ong the silent deposits|
+4391|187|8|2|45|48923.10|0.07|0.04|R|F|1992-04-01|1992-05-01|1992-04-13|TAKE BACK RETURN|TRUCK|ep quickly after |
+4416|94|7|1|37|36781.33|0.08|0.03|A|F|1992-10-23|1992-08-23|1992-11-16|COLLECT COD|RAIL|fluffily ironic |
+4416|89|10|2|3|2967.24|0.06|0.03|R|F|1992-10-22|1992-08-06|1992-11-13|DELIVER IN PERSON|SHIP| requests sleep along the |
+4416|9|6|3|45|40905.00|0.09|0.03|A|F|1992-10-16|1992-09-09|1992-10-28|COLLECT COD|AIR|the final pinto beans. special frets |
+4417|75|5|1|28|27301.96|0.08|0.02|N|O|1998-09-04|1998-10-04|1998-09-19|TAKE BACK RETURN|REG AIR|ies across the furious|
+4417|181|2|2|1|1081.18|0.06|0.08|N|O|1998-10-23|1998-08-22|1998-10-24|NONE|REG AIR|press deposits promise stealthily amo|
+4417|98|2|3|35|34933.15|0.06|0.04|N|O|1998-08-08|1998-09-23|1998-09-02|DELIVER IN PERSON|FOB|slyly regular, silent courts. even packag|
+4418|35|1|1|32|29920.96|0.02|0.06|A|F|1993-05-28|1993-06-02|1993-05-30|TAKE BACK RETURN|RAIL|ly. bold pinto b|
+4418|22|5|2|14|12908.28|0.03|0.04|A|F|1993-05-20|1993-06-18|1993-06-05|TAKE BACK RETURN|SHIP| blithely regular requests. blith|
+4418|79|7|3|3|2937.21|0.00|0.02|R|F|1993-04-08|1993-06-04|1993-05-02|NONE|SHIP|luffily across the unusual ideas. reque|
+4419|108|9|1|45|45364.50|0.01|0.05|N|O|1996-07-20|1996-09-07|1996-08-18|DELIVER IN PERSON|TRUCK|s doze sometimes fluffily regular a|
+4419|32|8|2|42|39145.26|0.00|0.03|N|O|1996-09-18|1996-07-25|1996-09-21|COLLECT COD|RAIL|sts. furious|
+4419|132|3|3|6|6192.78|0.02|0.08|N|O|1996-06-25|1996-09-04|1996-07-20|DELIVER IN PERSON|AIR|ts wake slyly final dugou|
+4420|8|5|1|7|6356.00|0.07|0.03|R|F|1994-08-30|1994-09-03|1994-09-25|NONE|FOB| regular instructions sleep around|
+4421|98|2|1|37|36929.33|0.09|0.08|N|O|1997-07-22|1997-06-27|1997-07-25|DELIVER IN PERSON|SHIP|l accounts. ironic request|
+4421|56|1|2|46|43978.30|0.04|0.04|N|O|1997-04-21|1997-05-13|1997-05-15|DELIVER IN PERSON|FOB|reful packages. bold, |
+4421|167|6|3|46|49089.36|0.00|0.06|N|O|1997-05-25|1997-05-21|1997-06-23|COLLECT COD|TRUCK|g dependenci|
+4421|191|4|4|32|34918.08|0.06|0.04|N|O|1997-07-09|1997-06-03|1997-07-25|NONE|SHIP|ar ideas eat among the furiousl|
+4421|190|1|5|32|34886.08|0.06|0.04|N|O|1997-07-28|1997-06-14|1997-08-13|NONE|REG AIR|uickly final pinto beans impress. bold |
+4421|47|6|6|44|41669.76|0.09|0.06|N|O|1997-06-17|1997-06-20|1997-06-29|NONE|TRUCK|le carefully. bl|
+4421|116|3|7|18|18289.98|0.01|0.00|N|O|1997-06-07|1997-05-13|1997-06-10|DELIVER IN PERSON|FOB|. regular, s|
+4422|135|1|1|5|5175.65|0.09|0.07|N|O|1995-07-17|1995-08-13|1995-07-25|NONE|SHIP|e furiously about t|
+4422|48|5|2|41|38869.64|0.08|0.05|N|F|1995-06-12|1995-07-09|1995-06-20|COLLECT COD|TRUCK| theodolites shal|
+4422|103|10|3|39|39120.90|0.00|0.05|N|O|1995-09-02|1995-06-24|1995-09-14|NONE|TRUCK|en hockey players engage|
+4422|153|4|4|4|4212.60|0.02|0.05|N|O|1995-09-18|1995-08-12|1995-10-18|COLLECT COD|FOB|cies along the bo|
+4422|80|9|5|20|19601.60|0.07|0.05|N|O|1995-08-17|1995-07-16|1995-09-13|DELIVER IN PERSON|RAIL|ructions wake slyly al|
+4423|150|9|1|3|3150.45|0.03|0.00|A|F|1995-03-22|1995-04-06|1995-04-19|NONE|TRUCK| final theodolites nag after the bli|
+4423|60|5|2|2|1920.12|0.07|0.04|A|F|1995-03-04|1995-04-04|1995-03-08|TAKE BACK RETURN|REG AIR|old sheaves sleep|
+4448|52|7|1|24|22849.20|0.10|0.07|N|O|1998-09-09|1998-07-06|1998-09-27|DELIVER IN PERSON|SHIP|nal packages along the ironic instructi|
+4448|189|10|2|13|14159.34|0.00|0.01|N|O|1998-07-26|1998-07-03|1998-08-14|COLLECT COD|MAIL|fluffily express accounts integrate furiou|
+4448|41|4|3|35|32936.40|0.10|0.06|N|O|1998-09-18|1998-07-27|1998-10-08|NONE|REG AIR|aggle carefully alongside of the q|
+4448|141|2|4|3|3123.42|0.01|0.01|N|O|1998-07-20|1998-07-10|1998-08-07|DELIVER IN PERSON|TRUCK|ronic theod|
+4448|91|2|5|41|40634.69|0.00|0.08|N|O|1998-07-30|1998-08-09|1998-08-03|NONE|AIR|pon the permanently even excuses nag |
+4448|172|3|6|12|12866.04|0.06|0.03|N|O|1998-08-21|1998-06-30|1998-09-09|COLLECT COD|RAIL|sits about the ironic, bu|
+4449|32|3|1|42|39145.26|0.10|0.07|N|O|1998-03-22|1998-05-09|1998-04-03|NONE|FOB| packages. blithely final |
+4449|141|8|2|10|10411.40|0.02|0.03|N|O|1998-05-09|1998-05-04|1998-05-15|NONE|SHIP|ccounts alongside of the platelets integr|
+4450|174|5|1|44|47263.48|0.10|0.00|N|O|1997-10-12|1997-10-13|1997-10-29|DELIVER IN PERSON|RAIL| the slyly eve|
+4450|15|6|2|9|8235.09|0.03|0.03|N|O|1997-08-13|1997-08-16|1997-08-15|NONE|FOB|gular requests cajole carefully. regular c|
+4450|96|8|3|45|44824.05|0.08|0.01|N|O|1997-09-01|1997-10-06|1997-09-19|NONE|TRUCK|express ideas are furiously regular|
+4450|62|9|4|13|12506.78|0.00|0.00|N|O|1997-08-26|1997-09-18|1997-09-20|COLLECT COD|MAIL| brave foxes. slyly unusual|
+4450|56|7|5|6|5736.30|0.09|0.01|N|O|1997-09-02|1997-09-30|1997-09-09|NONE|FOB|eposits. foxes cajole unusual fox|
+4451|164|5|1|40|42566.40|0.03|0.03|A|F|1994-11-18|1994-12-25|1994-11-26|DELIVER IN PERSON|RAIL|y. slyly special deposits are sly|
+4451|63|4|2|34|32744.04|0.10|0.02|A|F|1994-11-30|1994-12-04|1994-12-13|COLLECT COD|SHIP| regular ideas.|
+4451|159|10|3|19|20123.85|0.05|0.06|R|F|1994-10-09|1994-11-26|1994-10-23|COLLECT COD|FOB|ly after the fluffi|
+4452|114|8|1|21|21296.31|0.07|0.03|R|F|1994-10-06|1994-08-23|1994-10-15|COLLECT COD|TRUCK|multipliers x-ray carefully in place of |
+4452|1|8|2|47|42347.00|0.01|0.06|A|F|1994-10-08|1994-08-09|1994-10-09|TAKE BACK RETURN|TRUCK|ts. slyly regular cour|
+4453|147|10|1|41|42932.74|0.00|0.08|N|O|1997-07-17|1997-05-15|1997-07-31|NONE|REG AIR|anent theodolites are slyly except t|
+4453|133|4|2|16|16530.08|0.03|0.00|N|O|1997-07-22|1997-05-05|1997-08-03|COLLECT COD|FOB|ar excuses nag quickly even accounts. b|
+4453|62|7|3|48|46178.88|0.02|0.07|N|O|1997-05-29|1997-06-24|1997-06-03|NONE|SHIP|eep. fluffily express accounts at the furi|
+4453|102|5|4|26|26054.60|0.06|0.07|N|O|1997-05-07|1997-06-07|1997-05-22|NONE|TRUCK|express packages are|
+4454|151|9|1|20|21023.00|0.10|0.08|R|F|1994-05-06|1994-03-17|1994-05-20|COLLECT COD|SHIP|lar theodolites. even instructio|
+4454|152|10|2|22|23147.30|0.06|0.02|A|F|1994-02-06|1994-04-11|1994-03-06|DELIVER IN PERSON|RAIL|ully. carefully final accounts accordi|
+4454|192|3|3|45|49148.55|0.07|0.04|A|F|1994-03-29|1994-03-26|1994-04-04|TAKE BACK RETURN|RAIL|ests promise. packages print fur|
+4454|2|3|4|1|902.00|0.09|0.05|A|F|1994-02-05|1994-04-19|1994-02-12|COLLECT COD|RAIL|equests run.|
+4454|52|4|5|48|45698.40|0.00|0.07|R|F|1994-04-23|1994-04-03|1994-04-26|COLLECT COD|FOB|to beans wake across th|
+4454|160|8|6|20|21203.20|0.10|0.03|A|F|1994-04-08|1994-03-06|1994-04-26|DELIVER IN PERSON|TRUCK|quickly regular requests. furiously|
+4455|70|5|1|20|19401.40|0.01|0.05|A|F|1994-01-31|1993-11-21|1994-03-02|DELIVER IN PERSON|MAIL| express packages. packages boost quickly|
+4455|153|4|2|47|49498.05|0.09|0.01|R|F|1994-01-01|1993-12-25|1994-01-05|COLLECT COD|FOB| requests. even, even accou|
+4455|123|2|3|34|34786.08|0.00|0.06|A|F|1993-10-24|1993-11-27|1993-11-04|TAKE BACK RETURN|AIR| slyly ironic requests. quickly even d|
+4480|108|5|1|30|30243.00|0.08|0.03|R|F|1994-07-29|1994-06-22|1994-08-01|NONE|FOB|ven braids us|
+4481|24|9|1|50|46201.00|0.02|0.06|N|O|1996-07-22|1996-05-13|1996-08-14|DELIVER IN PERSON|RAIL|ar packages. regula|
+4481|190|1|2|27|29435.13|0.02|0.03|N|O|1996-04-06|1996-05-17|1996-04-12|TAKE BACK RETURN|AIR|ackages haggle even, |
+4482|71|2|1|32|31074.24|0.06|0.03|A|F|1995-05-16|1995-07-22|1995-06-07|NONE|RAIL| quickly pendin|
+4482|96|9|2|32|31874.88|0.01|0.06|N|O|1995-08-16|1995-06-26|1995-09-10|DELIVER IN PERSON|AIR|eans wake according |
+4483|6|7|1|32|28992.00|0.07|0.07|R|F|1992-04-05|1992-05-25|1992-04-08|DELIVER IN PERSON|MAIL|ests haggle. slyl|
+4483|62|1|2|50|48103.00|0.01|0.06|A|F|1992-06-19|1992-05-12|1992-07-08|DELIVER IN PERSON|TRUCK|ag blithely even|
+4483|9|4|3|50|45450.00|0.00|0.04|R|F|1992-06-10|1992-04-18|1992-06-17|DELIVER IN PERSON|MAIL|ackages. furiously ironi|
+4484|95|9|1|4|3980.36|0.06|0.03|N|O|1997-04-09|1997-02-11|1997-04-12|TAKE BACK RETURN|TRUCK|packages de|
+4484|137|8|2|39|40448.07|0.05|0.02|N|O|1997-04-01|1997-01-26|1997-04-21|NONE|RAIL|onic accounts wake blithel|
+4484|190|1|3|38|41427.22|0.06|0.07|N|O|1997-03-07|1997-01-31|1997-04-01|COLLECT COD|REG AIR|. even requests un|
+4484|122|5|4|41|41906.92|0.06|0.03|N|O|1997-01-25|1997-02-15|1997-01-29|TAKE BACK RETURN|REG AIR|ress accounts. ironic deposits unwind fur|
+4484|3|4|5|42|37926.00|0.03|0.07|N|O|1997-03-25|1997-02-21|1997-04-05|DELIVER IN PERSON|REG AIR|ding, pending requests wake. fluffily |
+4484|36|7|6|29|27144.87|0.09|0.06|N|O|1996-12-27|1997-03-10|1997-01-13|NONE|FOB| wake blithely ironic|
+4484|103|8|7|50|50155.00|0.07|0.01|N|O|1997-03-17|1997-03-16|1997-03-21|COLLECT COD|FOB|the ironic, final theodo|
+4485|191|5|1|1|1091.19|0.03|0.05|R|F|1994-12-04|1995-02-07|1994-12-09|NONE|AIR|play according to the ironic, ironic|
+4485|141|10|2|46|47892.44|0.04|0.06|R|F|1995-03-09|1994-12-14|1995-03-23|DELIVER IN PERSON|AIR|. ironic foxes haggle. regular war|
+4485|175|6|3|43|46232.31|0.01|0.05|R|F|1995-01-17|1995-02-11|1995-02-07|DELIVER IN PERSON|TRUCK|al accounts according to the slyly r|
+4485|144|5|4|43|44898.02|0.08|0.06|R|F|1995-01-28|1995-01-26|1995-02-07|DELIVER IN PERSON|AIR|. blithely|
+4485|6|7|5|47|42582.00|0.08|0.04|R|F|1995-03-11|1995-01-11|1995-03-21|TAKE BACK RETURN|RAIL|luffily pending acc|
+4486|135|1|1|46|47615.98|0.08|0.00|N|O|1998-05-02|1998-04-05|1998-05-08|COLLECT COD|MAIL|ackages. specia|
+4486|49|2|2|19|18031.76|0.10|0.01|N|O|1998-06-07|1998-05-28|1998-07-02|NONE|MAIL|pending foxes after|
+4486|96|7|3|47|46816.23|0.02|0.07|N|O|1998-04-09|1998-05-24|1998-05-07|DELIVER IN PERSON|MAIL|ts around the quiet packages ar|
+4486|91|4|4|28|27750.52|0.07|0.02|N|O|1998-04-21|1998-04-19|1998-04-26|TAKE BACK RETURN|AIR|to the furious, regular foxes play abov|
+4487|138|4|1|37|38410.81|0.03|0.07|R|F|1993-02-28|1993-04-18|1993-03-17|TAKE BACK RETURN|MAIL|bove the fu|
+4487|113|10|2|49|49642.39|0.10|0.00|R|F|1993-06-13|1993-05-08|1993-07-10|COLLECT COD|FOB|sual packages should ha|
+4487|190|1|3|1|1090.19|0.02|0.07|A|F|1993-05-11|1993-05-23|1993-05-17|TAKE BACK RETURN|FOB|ithely final asym|
+4487|93|4|4|25|24827.25|0.07|0.03|A|F|1993-03-09|1993-04-27|1993-03-30|COLLECT COD|RAIL|g the final instructions. slyly c|
+4512|162|1|1|30|31864.80|0.07|0.07|N|O|1996-01-28|1995-12-22|1996-02-22|TAKE BACK RETURN|TRUCK|ly unusual package|
+4512|41|4|2|24|22584.96|0.04|0.06|N|O|1995-12-16|1996-01-16|1995-12-25|NONE|SHIP|ly regular pinto beans. carefully bold depo|
+4512|145|8|3|21|21947.94|0.00|0.00|N|O|1995-10-31|1995-12-30|1995-11-15|NONE|REG AIR|lly unusual pinto b|
+4512|141|2|4|32|33316.48|0.10|0.01|N|O|1995-11-25|1995-12-28|1995-12-06|NONE|FOB|counts are against the quickly regular |
+4512|133|4|5|43|44424.59|0.06|0.00|N|O|1995-12-20|1995-11-28|1996-01-14|NONE|AIR|are carefully. theodolites wake|
+4513|170|1|1|29|31034.93|0.03|0.01|N|O|1996-05-18|1996-05-23|1996-06-08|NONE|REG AIR|cajole. regular packages boost. s|
+4513|70|9|2|39|37832.73|0.01|0.04|N|O|1996-06-25|1996-05-14|1996-07-24|NONE|MAIL|slyly furiously unusual deposits. blit|
+4513|138|4|3|34|35296.42|0.00|0.03|N|O|1996-03-27|1996-06-12|1996-04-06|DELIVER IN PERSON|SHIP|sits. quickly even instructions |
+4513|192|6|4|13|14198.47|0.08|0.08|N|O|1996-04-12|1996-05-19|1996-04-25|DELIVER IN PERSON|AIR|l, final excuses detect furi|
+4514|164|9|1|27|28732.32|0.06|0.06|R|F|1994-07-01|1994-07-13|1994-07-26|COLLECT COD|AIR| even, silent foxes be|
+4514|46|3|2|15|14190.60|0.10|0.04|R|F|1994-08-24|1994-07-11|1994-09-14|DELIVER IN PERSON|RAIL|! unusual, special deposits afte|
+4514|78|8|3|10|9780.70|0.09|0.05|A|F|1994-06-19|1994-06-25|1994-07-01|COLLECT COD|SHIP|ake furiously. carefully regular requests|
+4514|81|2|4|9|8829.72|0.10|0.03|A|F|1994-08-04|1994-07-01|1994-09-01|DELIVER IN PERSON|REG AIR|wly. quick|
+4514|149|8|5|12|12589.68|0.02|0.03|R|F|1994-08-20|1994-06-09|1994-09-15|TAKE BACK RETURN|FOB| carefully ironic foxes nag caref|
+4514|189|10|6|38|41388.84|0.03|0.05|A|F|1994-07-28|1994-07-06|1994-08-25|NONE|AIR|ending excuses. sl|
+4514|177|8|7|27|29083.59|0.04|0.06|A|F|1994-06-24|1994-07-14|1994-06-30|TAKE BACK RETURN|TRUCK|. slyly sile|
+4515|39|10|1|15|14085.45|0.06|0.01|R|F|1992-05-26|1992-05-25|1992-06-03|NONE|SHIP|posits wake|
+4515|103|10|2|50|50155.00|0.06|0.03|A|F|1992-03-28|1992-05-16|1992-04-20|NONE|AIR|ding instructions again|
+4515|154|6|3|27|28462.05|0.09|0.01|A|F|1992-06-06|1992-06-08|1992-06-07|DELIVER IN PERSON|REG AIR| against the even re|
+4515|54|5|4|32|30529.60|0.06|0.03|R|F|1992-04-07|1992-05-11|1992-04-09|COLLECT COD|MAIL|carefully express depo|
+4515|45|8|5|22|20790.88|0.09|0.07|A|F|1992-07-16|1992-05-07|1992-07-23|NONE|SHIP|le quickly above the even, bold ideas.|
+4515|180|8|6|23|24844.14|0.04|0.00|R|F|1992-05-23|1992-06-15|1992-06-20|TAKE BACK RETURN|FOB|ns. bold r|
+4516|170|9|1|34|36385.78|0.05|0.04|A|F|1994-05-16|1994-06-23|1994-06-12|NONE|SHIP|even pinto beans wake qui|
+4517|43|4|1|50|47152.00|0.01|0.02|N|O|1998-06-08|1998-04-18|1998-06-20|DELIVER IN PERSON|MAIL|refully pending acco|
+4518|144|7|1|9|9397.26|0.09|0.04|N|O|1997-06-26|1997-07-07|1997-07-10|NONE|RAIL| pending deposits. slyly re|
+4518|45|6|2|19|17955.76|0.10|0.05|N|O|1997-08-09|1997-06-06|1997-08-27|COLLECT COD|RAIL|ter the slyly bo|
+4519|55|3|1|30|28651.50|0.09|0.07|R|F|1993-04-11|1993-06-05|1993-04-22|DELIVER IN PERSON|REG AIR|totes. slyly bold somas after the |
+4519|191|3|2|37|40374.03|0.06|0.08|R|F|1993-07-22|1993-06-16|1993-08-19|COLLECT COD|AIR|ly slyly furious depth|
+4544|131|7|1|40|41245.20|0.07|0.01|N|O|1997-08-15|1997-10-16|1997-08-20|DELIVER IN PERSON|RAIL| detect slyly. evenly pending instru|
+4544|172|2|2|19|20371.23|0.08|0.01|N|O|1997-08-14|1997-09-08|1997-08-25|NONE|SHIP|regular ideas are furiously about|
+4544|71|9|3|20|19421.40|0.02|0.07|N|O|1997-10-12|1997-10-11|1997-10-13|COLLECT COD|REG AIR| waters about the|
+4544|51|6|4|39|37090.95|0.07|0.05|N|O|1997-08-20|1997-09-07|1997-08-27|COLLECT COD|REG AIR|ular packages. s|
+4544|133|4|5|31|32027.03|0.09|0.03|N|O|1997-08-09|1997-09-29|1997-08-17|COLLECT COD|TRUCK|dolites detect quickly reg|
+4544|27|8|6|8|7416.16|0.10|0.03|N|O|1997-10-13|1997-10-06|1997-10-25|COLLECT COD|AIR|olites. fi|
+4545|173|1|1|38|40780.46|0.06|0.06|R|F|1993-01-27|1993-03-01|1993-02-04|NONE|TRUCK|nts serve according to th|
+4545|63|4|2|27|26002.62|0.01|0.06|R|F|1993-02-07|1993-02-18|1993-02-18|NONE|FOB|ously bold asymptotes! blithely pen|
+4545|87|8|3|9|8883.72|0.10|0.06|R|F|1993-03-20|1993-02-23|1993-04-11|TAKE BACK RETURN|AIR|xpress accounts|
+4545|64|9|4|2|1928.12|0.10|0.00|R|F|1993-04-16|1993-04-17|1993-05-03|NONE|REG AIR|ages use. slyly even i|
+4545|117|1|5|27|27461.97|0.08|0.05|A|F|1993-03-18|1993-02-22|1993-03-23|NONE|RAIL|ccounts haggle carefully. deposits |
+4545|109|2|6|8|8072.80|0.03|0.02|A|F|1993-05-01|1993-03-12|1993-05-15|NONE|FOB| boost slyly. slyly|
+4545|9|2|7|36|32724.00|0.10|0.04|R|F|1993-01-28|1993-03-30|1993-02-04|DELIVER IN PERSON|SHIP|sublate slyly. furiously ironic accounts b|
+4546|133|4|1|10|10331.30|0.09|0.02|N|O|1995-09-23|1995-10-10|1995-10-23|COLLECT COD|TRUCK|osits alongside of the|
+4546|171|10|2|15|16067.55|0.04|0.07|N|O|1995-07-31|1995-10-17|1995-08-06|NONE|REG AIR|ught to cajole furiously. qu|
+4546|77|8|3|4|3908.28|0.06|0.08|N|O|1995-08-14|1995-10-07|1995-08-16|COLLECT COD|MAIL|kly pending dependencies along the furio|
+4546|149|6|4|10|10491.40|0.08|0.02|N|O|1995-09-02|1995-09-16|1995-09-10|DELIVER IN PERSON|FOB|above the enticingly ironic dependencies|
+4547|188|9|1|15|16322.70|0.10|0.04|A|F|1993-12-08|1993-11-15|1993-12-22|NONE|REG AIR|ets haggle. regular dinos affix fu|
+4547|116|10|2|7|7112.77|0.10|0.02|A|F|1993-09-04|1993-09-29|1993-09-20|COLLECT COD|RAIL|slyly express a|
+4547|45|2|3|15|14175.60|0.00|0.00|R|F|1993-11-18|1993-10-06|1993-12-13|NONE|TRUCK|e carefully across the unus|
+4547|148|7|4|15|15722.10|0.05|0.08|R|F|1993-11-29|1993-10-12|1993-12-29|COLLECT COD|REG AIR|ironic gifts integrate |
+4548|14|8|1|21|19194.21|0.10|0.05|N|O|1996-07-11|1996-09-04|1996-07-30|COLLECT COD|REG AIR|pecial theodoli|
+4548|47|10|2|17|16099.68|0.00|0.08|N|O|1996-07-23|1996-09-21|1996-07-26|DELIVER IN PERSON|REG AIR|y ironic requests above the fluffily d|
+4548|123|2|3|47|48086.64|0.05|0.04|N|O|1996-07-24|1996-09-12|1996-08-08|NONE|MAIL|ts. excuses use slyly spec|
+4548|177|6|4|22|23697.74|0.07|0.01|N|O|1996-07-06|1996-08-23|1996-07-15|DELIVER IN PERSON|RAIL|s. furiously ironic theodolites c|
+4548|45|4|5|36|34021.44|0.04|0.06|N|O|1996-08-19|1996-09-12|1996-09-08|COLLECT COD|FOB|tions integrat|
+4549|159|1|1|44|46602.60|0.08|0.00|N|O|1998-03-13|1998-04-15|1998-03-27|TAKE BACK RETURN|TRUCK|ding to the regular, silent requests|
+4549|89|10|2|1|989.08|0.05|0.08|N|O|1998-05-04|1998-04-11|1998-05-14|TAKE BACK RETURN|AIR| requests wake. furiously even |
+4550|150|7|1|9|9451.35|0.05|0.06|R|F|1995-04-19|1995-02-07|1995-04-24|COLLECT COD|SHIP|l dependencies boost slyly after th|
+4550|66|5|2|19|18355.14|0.06|0.04|A|F|1995-01-01|1995-02-13|1995-01-20|NONE|AIR|quests. express |
+4551|11|1|1|6|5466.06|0.08|0.08|N|O|1996-05-18|1996-04-23|1996-06-13|DELIVER IN PERSON|TRUCK|fily silent fo|
+4551|179|8|2|26|28058.42|0.02|0.04|N|O|1996-04-14|1996-04-26|1996-04-17|TAKE BACK RETURN|RAIL|le. carefully dogged accounts use furiousl|
+4551|22|1|3|22|20284.44|0.08|0.01|N|O|1996-05-12|1996-03-17|1996-05-29|TAKE BACK RETURN|REG AIR|ly ironic reques|
+4551|198|10|4|27|29651.13|0.00|0.01|N|O|1996-04-28|1996-03-22|1996-05-22|TAKE BACK RETURN|RAIL|y along the slyly even |
+4576|90|1|1|5|4950.45|0.09|0.03|N|O|1996-08-23|1996-11-08|1996-09-20|TAKE BACK RETURN|AIR|ly express, special asymptote|
+4576|58|9|2|43|41196.15|0.08|0.06|N|O|1996-10-24|1996-09-23|1996-11-10|NONE|SHIP|ly final deposits. never|
+4576|42|1|3|14|13188.56|0.09|0.01|N|O|1996-09-12|1996-09-30|1996-09-24|COLLECT COD|MAIL|detect slyly.|
+4577|185|6|1|43|46662.74|0.01|0.03|N|O|1998-06-16|1998-07-09|1998-06-17|TAKE BACK RETURN|AIR|packages. |
+4577|177|6|2|43|46318.31|0.05|0.03|N|O|1998-08-24|1998-06-02|1998-09-14|TAKE BACK RETURN|RAIL|ly accounts. carefully |
+4577|69|6|3|12|11628.72|0.07|0.05|N|O|1998-07-29|1998-06-17|1998-08-04|DELIVER IN PERSON|TRUCK|equests alongsi|
+4578|74|2|1|10|9740.70|0.09|0.06|R|F|1993-01-01|1992-11-19|1993-01-28|TAKE BACK RETURN|REG AIR|uests. blithely unus|
+4578|169|10|2|42|44904.72|0.06|0.00|R|F|1993-01-05|1992-11-06|1993-01-13|DELIVER IN PERSON|FOB|s are caref|
+4578|179|8|3|15|16187.55|0.01|0.01|R|F|1992-10-23|1992-11-22|1992-11-09|DELIVER IN PERSON|REG AIR|gular theodo|
+4578|139|10|4|7|7273.91|0.09|0.08|A|F|1992-12-07|1992-11-27|1993-01-05|TAKE BACK RETURN|SHIP|odolites. carefully unusual ideas accor|
+4578|163|2|5|20|21263.20|0.04|0.02|A|F|1993-01-11|1992-11-09|1993-01-23|TAKE BACK RETURN|RAIL|iously pending theodolites--|
+4579|175|4|1|14|15052.38|0.02|0.02|N|O|1996-02-01|1996-01-08|1996-02-08|TAKE BACK RETURN|MAIL|nding theodolites. fluffil|
+4579|42|3|2|28|26377.12|0.02|0.05|N|O|1996-01-22|1996-02-13|1996-02-03|DELIVER IN PERSON|RAIL|slyly across the |
+4579|178|9|3|34|36657.78|0.05|0.02|N|O|1996-02-26|1996-02-22|1996-03-16|COLLECT COD|MAIL|hely. carefully blithe dependen|
+4579|120|1|4|8|8160.96|0.05|0.06|N|O|1995-12-16|1996-01-15|1995-12-18|TAKE BACK RETURN|AIR|posits. carefully perman|
+4580|92|5|1|22|21825.98|0.01|0.05|A|F|1994-01-16|1994-01-26|1994-02-05|COLLECT COD|AIR|nticingly final packag|
+4580|32|3|2|10|9320.30|0.05|0.04|R|F|1993-12-20|1993-12-30|1994-01-17|COLLECT COD|RAIL|gular, pending deposits. fina|
+4580|1|8|3|41|36941.00|0.00|0.07|R|F|1993-12-13|1994-01-31|1994-01-06|NONE|SHIP|requests. quickly silent asymptotes sle|
+4580|178|8|4|5|5390.85|0.07|0.00|A|F|1994-01-28|1993-12-17|1994-02-22|NONE|TRUCK|o beans. f|
+4580|189|10|5|39|42478.02|0.03|0.02|R|F|1993-12-28|1993-12-26|1994-01-23|NONE|RAIL|. fluffily final dolphins use furiously al|
+4581|165|4|1|37|39410.92|0.01|0.04|A|F|1992-10-17|1992-11-05|1992-11-04|DELIVER IN PERSON|MAIL|e the blithely bold pearls ha|
+4581|50|3|2|7|6650.35|0.01|0.02|A|F|1992-10-09|1992-10-20|1992-10-21|TAKE BACK RETURN|MAIL|express accounts d|
+4581|21|10|3|46|42366.92|0.04|0.04|A|F|1992-09-09|1992-11-27|1992-09-26|NONE|REG AIR|nag toward the carefully final accounts. |
+4582|192|5|1|17|18567.23|0.09|0.08|N|O|1996-08-17|1996-08-26|1996-08-20|COLLECT COD|REG AIR|ng packages. depo|
+4583|141|2|1|17|17699.38|0.01|0.05|A|F|1994-11-08|1994-11-03|1994-11-29|COLLECT COD|MAIL|romise. reques|
+4583|187|8|2|43|46748.74|0.04|0.04|A|F|1994-10-30|1994-12-17|1994-11-16|COLLECT COD|RAIL|fully after the speci|
+4583|196|10|3|28|30693.32|0.00|0.07|A|F|1994-10-29|1994-11-21|1994-11-28|NONE|SHIP|to beans haggle sly|
+4583|173|4|4|27|28975.59|0.08|0.03|R|F|1995-01-11|1994-12-24|1995-02-10|DELIVER IN PERSON|TRUCK| detect silent requests. furiously speci|
+4583|184|5|5|36|39030.48|0.09|0.06|A|F|1995-01-06|1994-11-25|1995-01-29|DELIVER IN PERSON|RAIL|ar requests haggle after the furiously |
+4583|122|7|6|14|14309.68|0.09|0.01|R|F|1994-11-17|1994-11-08|1994-11-21|DELIVER IN PERSON|AIR|detect. doggedly regular pi|
+4583|87|8|7|32|31586.56|0.04|0.00|A|F|1995-01-13|1994-10-29|1995-02-08|TAKE BACK RETURN|RAIL|across the pinto beans-- quickly|
+4608|173|1|1|30|32195.10|0.08|0.05|R|F|1994-10-08|1994-07-18|1994-10-25|DELIVER IN PERSON|SHIP|s cajole. slyly |
+4608|47|8|2|50|47352.00|0.06|0.01|A|F|1994-07-25|1994-09-01|1994-08-10|NONE|FOB| theodolites|
+4608|79|9|3|50|48953.50|0.03|0.01|A|F|1994-08-04|1994-09-10|1994-08-13|COLLECT COD|TRUCK| wake closely. even decoys haggle above|
+4608|31|2|4|36|33517.08|0.05|0.06|R|F|1994-10-04|1994-08-02|1994-10-21|COLLECT COD|FOB|ages wake quickly slyly iron|
+4609|47|6|1|28|26517.12|0.10|0.05|N|O|1997-02-02|1997-02-17|1997-03-02|DELIVER IN PERSON|REG AIR|ously. quickly final requests cajole fl|
+4609|185|6|2|3|3255.54|0.09|0.03|N|O|1996-12-28|1997-02-06|1997-01-20|NONE|FOB|nstructions. furious instructions |
+4609|23|4|3|46|42458.92|0.05|0.05|N|O|1997-02-11|1997-01-16|1997-03-07|NONE|FOB|r foxes. fluffily ironic ideas ha|
+4610|87|8|1|21|20728.68|0.07|0.07|R|F|1993-08-10|1993-08-05|1993-08-27|NONE|REG AIR|ly special theodolites. even,|
+4610|175|5|2|14|15052.38|0.00|0.07|R|F|1993-07-28|1993-07-25|1993-07-31|TAKE BACK RETURN|SHIP| ironic frays. dependencies detect blithel|
+4610|159|1|3|44|46602.60|0.05|0.03|A|F|1993-08-05|1993-07-20|1993-08-19|COLLECT COD|TRUCK| final theodolites |
+4610|75|3|4|26|25351.82|0.06|0.03|R|F|1993-07-01|1993-07-19|1993-07-19|NONE|MAIL| to the fluffily ironic requests h|
+4610|147|8|5|29|30367.06|0.08|0.04|R|F|1993-08-09|1993-07-27|1993-08-16|DELIVER IN PERSON|AIR| foxes. special, express package|
+4611|52|7|1|47|44746.35|0.09|0.06|A|F|1993-03-05|1993-03-01|1993-03-17|COLLECT COD|TRUCK|iously. furiously regular|
+4611|35|6|2|31|28985.93|0.04|0.02|A|F|1993-01-28|1993-02-14|1993-01-29|TAKE BACK RETURN|AIR| final pinto beans. permanent, sp|
+4611|82|3|3|50|49104.00|0.08|0.01|R|F|1993-01-22|1993-03-30|1993-02-16|TAKE BACK RETURN|AIR|l platelets. |
+4611|71|9|4|48|46611.36|0.02|0.08|R|F|1993-02-28|1993-02-12|1993-03-01|COLLECT COD|AIR|ular accounts |
+4612|6|9|1|20|18120.00|0.02|0.03|R|F|1993-09-24|1993-12-18|1993-10-22|NONE|AIR|beans sleep blithely iro|
+4612|50|7|2|17|16150.85|0.10|0.06|A|F|1994-01-09|1993-11-08|1994-02-06|TAKE BACK RETURN|REG AIR|equests haggle carefully silent excus|
+4612|137|8|3|40|41485.20|0.08|0.01|R|F|1993-10-08|1993-11-23|1993-10-24|DELIVER IN PERSON|RAIL|special platelets.|
+4612|185|6|4|10|10851.80|0.10|0.06|A|F|1993-11-11|1993-11-19|1993-11-13|TAKE BACK RETURN|SHIP|unusual theodol|
+4613|38|9|1|17|15946.51|0.09|0.07|N|O|1998-06-07|1998-05-11|1998-06-29|DELIVER IN PERSON|SHIP|liers cajole a|
+4613|108|1|2|25|25202.50|0.05|0.04|N|O|1998-05-22|1998-04-11|1998-05-27|TAKE BACK RETURN|SHIP|y pending platelets x-ray ironically! pend|
+4613|174|3|3|15|16112.55|0.10|0.02|N|O|1998-05-31|1998-04-16|1998-06-25|DELIVER IN PERSON|MAIL|against the quickly r|
+4613|8|1|4|36|32688.00|0.04|0.01|N|O|1998-04-22|1998-05-05|1998-05-04|DELIVER IN PERSON|AIR|gainst the furiously ironic|
+4613|111|8|5|35|35388.85|0.04|0.06|N|O|1998-06-04|1998-04-17|1998-06-20|COLLECT COD|MAIL|e blithely against the even, bold pi|
+4613|196|8|6|47|51520.93|0.04|0.04|N|O|1998-07-03|1998-05-26|1998-07-09|NONE|FOB|uriously special requests wak|
+4613|119|3|7|39|39745.29|0.09|0.05|N|O|1998-06-12|1998-06-01|1998-07-06|DELIVER IN PERSON|REG AIR|ously express|
+4614|7|2|1|19|17233.00|0.09|0.08|N|O|1996-05-17|1996-06-21|1996-06-08|TAKE BACK RETURN|AIR|ix. carefully regular |
+4614|65|6|2|3|2895.18|0.08|0.01|N|O|1996-07-22|1996-07-21|1996-08-07|NONE|MAIL|ions engage final, ironic |
+4614|8|1|3|36|32688.00|0.10|0.04|N|O|1996-07-05|1996-06-26|1996-07-07|NONE|REG AIR|onic foxes affix furi|
+4614|126|9|4|6|6156.72|0.09|0.01|N|O|1996-06-11|1996-05-30|1996-07-03|COLLECT COD|REG AIR|ake quickly quickly regular epitap|
+4614|73|3|5|24|23353.68|0.07|0.06|N|O|1996-07-01|1996-06-24|1996-07-08|COLLECT COD|REG AIR|regular, even|
+4614|34|5|6|32|29888.96|0.10|0.05|N|O|1996-08-21|1996-05-28|1996-09-16|NONE|REG AIR|ickly furio|
+4614|128|1|7|41|42152.92|0.01|0.07|N|O|1996-07-31|1996-07-12|1996-08-16|COLLECT COD|REG AIR|ackages haggle carefully about the even, b|
+4615|92|4|1|10|9920.90|0.02|0.08|A|F|1993-11-20|1993-10-05|1993-12-08|DELIVER IN PERSON|AIR|sits. slyly express deposits are|
+4640|88|9|1|5|4940.40|0.03|0.08|N|O|1996-02-05|1996-02-14|1996-02-15|TAKE BACK RETURN|RAIL| warthogs against the regular|
+4640|88|9|2|9|8892.72|0.03|0.05|N|O|1996-02-12|1996-02-14|1996-02-29|DELIVER IN PERSON|AIR| accounts. unu|
+4640|27|10|3|18|16686.36|0.02|0.07|N|O|1996-02-28|1996-03-06|1996-03-28|DELIVER IN PERSON|RAIL|boost furiously accord|
+4640|23|2|4|36|33228.72|0.06|0.08|N|O|1996-01-03|1996-03-09|1996-01-11|DELIVER IN PERSON|RAIL|iously furious accounts boost. carefully|
+4640|156|1|5|15|15842.25|0.03|0.02|N|O|1996-03-19|1996-02-09|1996-04-11|TAKE BACK RETURN|FOB|y regular instructions doze furiously. reg|
+4641|190|1|1|45|49058.55|0.07|0.03|R|F|1993-05-11|1993-04-19|1993-05-21|DELIVER IN PERSON|MAIL| about the close |
+4641|95|7|2|39|38808.51|0.06|0.00|R|F|1993-02-10|1993-03-06|1993-02-15|TAKE BACK RETURN|REG AIR| the bold reque|
+4641|36|7|3|15|14040.45|0.01|0.08|R|F|1993-01-25|1993-04-09|1993-02-05|TAKE BACK RETURN|AIR|s. carefully even exc|
+4642|194|7|1|11|12036.09|0.04|0.07|A|F|1995-05-23|1995-04-26|1995-06-04|COLLECT COD|TRUCK|lithely express asympt|
+4642|180|10|2|34|36726.12|0.04|0.07|R|F|1995-04-01|1995-05-11|1995-04-23|COLLECT COD|SHIP|theodolites detect among the ironically sp|
+4642|21|2|3|10|9210.20|0.04|0.02|R|F|1995-04-16|1995-04-28|1995-04-24|COLLECT COD|RAIL|urts. even deposits nag beneath |
+4642|94|7|4|18|17893.62|0.00|0.04|N|F|1995-06-16|1995-04-16|1995-06-21|NONE|TRUCK|ily pending accounts hag|
+4642|179|10|5|41|44245.97|0.10|0.00|R|F|1995-04-08|1995-04-13|1995-05-01|DELIVER IN PERSON|MAIL|s are blithely. requests wake above the fur|
+4643|185|6|1|50|54259.00|0.08|0.05|N|O|1995-09-11|1995-08-13|1995-09-30|DELIVER IN PERSON|SHIP|. ironic deposits cajo|
+4644|177|7|1|4|4308.68|0.06|0.03|N|O|1998-05-06|1998-03-19|1998-05-28|NONE|MAIL|gular requests? pendi|
+4644|97|8|2|16|15953.44|0.03|0.04|N|O|1998-03-13|1998-02-21|1998-04-03|COLLECT COD|SHIP|lar excuses across the |
+4644|115|9|3|10|10151.10|0.02|0.02|N|O|1998-02-21|1998-02-28|1998-03-19|COLLECT COD|REG AIR|osits according to the|
+4644|154|2|4|45|47436.75|0.10|0.07|N|O|1998-02-02|1998-04-08|1998-02-15|COLLECT COD|SHIP| carefully a|
+4644|87|8|5|10|9870.80|0.08|0.08|N|O|1998-03-12|1998-03-11|1998-03-19|TAKE BACK RETURN|REG AIR| the slow, final fo|
+4645|50|7|1|45|42752.25|0.09|0.05|A|F|1994-12-27|1994-11-02|1994-12-31|DELIVER IN PERSON|AIR|ular ideas. slyly|
+4645|66|7|2|32|30913.92|0.10|0.08|A|F|1994-11-17|1994-10-30|1994-11-18|COLLECT COD|REG AIR| final accounts alongside|
+4645|54|5|3|25|23851.25|0.03|0.00|R|F|1994-10-25|1994-12-11|1994-11-14|NONE|REG AIR|braids. ironic dependencies main|
+4645|37|8|4|42|39355.26|0.10|0.02|R|F|1994-12-02|1994-12-18|1994-12-16|COLLECT COD|TRUCK|regular pinto beans amon|
+4645|161|10|5|35|37140.60|0.03|0.07|A|F|1994-12-08|1994-11-25|1994-12-09|TAKE BACK RETURN|FOB|sias believe bl|
+4645|42|9|6|27|25435.08|0.09|0.08|R|F|1994-11-26|1994-10-25|1994-12-04|NONE|SHIP|ously express pinto beans. ironic depos|
+4645|31|2|7|42|39103.26|0.10|0.06|A|F|1994-12-31|1994-10-22|1995-01-28|DELIVER IN PERSON|AIR|e slyly regular pinto beans. thin|
+4646|191|3|1|24|26188.56|0.02|0.05|N|O|1996-09-18|1996-08-09|1996-09-21|TAKE BACK RETURN|RAIL|ic platelets lose carefully. blithely unu|
+4646|178|6|2|26|28032.42|0.07|0.00|N|O|1996-10-02|1996-08-25|1996-10-27|DELIVER IN PERSON|MAIL|ix according to the slyly spe|
+4646|34|10|3|18|16812.54|0.01|0.00|N|O|1996-06-30|1996-08-10|1996-07-12|TAKE BACK RETURN|TRUCK|beans sleep car|
+4646|40|1|4|38|35721.52|0.08|0.01|N|O|1996-09-01|1996-08-23|1996-09-27|COLLECT COD|SHIP|al platelets cajole. slyly final dol|
+4646|26|1|5|22|20372.44|0.01|0.08|N|O|1996-07-14|1996-08-06|1996-07-29|DELIVER IN PERSON|MAIL|cies are blithely after the slyly reg|
+4647|93|6|1|16|15889.44|0.09|0.07|R|F|1994-09-07|1994-07-15|1994-10-06|COLLECT COD|RAIL|o beans about the fluffily special the|
+4647|129|2|2|34|34990.08|0.01|0.02|R|F|1994-05-20|1994-06-20|1994-05-29|COLLECT COD|TRUCK|ly sly accounts|
+4647|147|8|3|27|28272.78|0.03|0.08|R|F|1994-05-20|1994-06-26|1994-05-30|NONE|FOB|ully even ti|
+4647|139|10|4|2|2078.26|0.04|0.07|R|F|1994-07-03|1994-07-22|1994-07-22|TAKE BACK RETURN|RAIL|dolites wake furiously special pinto be|
+4647|187|8|5|2|2174.36|0.07|0.06|A|F|1994-05-27|1994-08-05|1994-06-10|TAKE BACK RETURN|FOB| pinto beans believe furiously slyly silent|
+4647|29|4|6|28|26012.56|0.02|0.03|A|F|1994-08-25|1994-08-06|1994-09-18|DELIVER IN PERSON|FOB| are above the fluffily fin|
+4672|59|7|1|22|21099.10|0.01|0.07|N|O|1995-12-03|1995-12-08|1995-12-17|COLLECT COD|AIR|l instructions. blithely ironic packages |
+4672|61|10|2|41|39403.46|0.00|0.00|N|O|1995-12-01|1995-12-15|1995-12-12|COLLECT COD|RAIL| slyly quie|
+4672|163|10|3|24|25515.84|0.04|0.03|N|O|1995-11-11|1995-12-28|1995-12-04|NONE|REG AIR|y fluffily stealt|
+4672|57|2|4|13|12441.65|0.10|0.03|N|O|1996-02-02|1995-12-13|1996-03-02|DELIVER IN PERSON|RAIL|ar requests? pending accounts against|
+4672|55|10|5|45|42977.25|0.08|0.07|N|O|1996-02-07|1996-01-16|1996-02-14|DELIVER IN PERSON|MAIL| platelets use amon|
+4672|141|8|6|20|20822.80|0.02|0.07|N|O|1995-12-08|1996-01-25|1995-12-19|COLLECT COD|REG AIR|s boost at the ca|
+4672|72|10|7|38|36938.66|0.01|0.01|N|O|1995-11-28|1995-12-08|1995-12-13|COLLECT COD|SHIP|ests. idle, regular ex|
+4673|17|8|1|8|7336.08|0.08|0.01|N|O|1996-10-12|1996-10-05|1996-11-04|TAKE BACK RETURN|FOB|lithely final re|
+4673|101|2|2|44|44048.40|0.06|0.01|N|O|1996-12-11|1996-10-31|1997-01-08|DELIVER IN PERSON|RAIL| gifts cajole dari|
+4673|123|2|3|9|9208.08|0.04|0.07|N|O|1996-10-15|1996-09-30|1996-10-30|DELIVER IN PERSON|MAIL|ages nag across |
+4674|150|7|1|50|52507.50|0.07|0.08|A|F|1994-05-13|1994-06-15|1994-06-05|COLLECT COD|RAIL|haggle about the blithel|
+4674|189|10|2|35|38121.30|0.02|0.05|A|F|1994-08-02|1994-06-04|1994-08-21|COLLECT COD|FOB|le quickly after the express sent|
+4674|111|5|3|3|3033.33|0.01|0.05|A|F|1994-07-19|1994-05-28|1994-07-23|TAKE BACK RETURN|RAIL| regular requests na|
+4674|13|7|4|21|19173.21|0.02|0.08|R|F|1994-05-08|1994-07-02|1994-06-04|COLLECT COD|RAIL|ent accounts sublate deposits. instruc|
+4675|171|2|1|6|6427.02|0.00|0.05|R|F|1994-01-22|1994-01-06|1994-02-12|TAKE BACK RETURN|TRUCK| unusual ideas thrash bl|
+4675|144|7|2|12|12529.68|0.00|0.04|A|F|1993-12-22|1994-01-12|1993-12-23|TAKE BACK RETURN|AIR|posits affix carefully|
+4675|181|2|3|5|5405.90|0.05|0.05|A|F|1994-01-16|1994-01-05|1994-01-18|DELIVER IN PERSON|RAIL|lent pinto beans|
+4675|34|10|4|26|24284.78|0.03|0.01|A|F|1993-12-16|1993-12-29|1993-12-23|DELIVER IN PERSON|SHIP|nts. express requests are quickly |
+4675|81|2|5|18|17659.44|0.01|0.08|R|F|1994-02-23|1994-01-18|1994-03-05|TAKE BACK RETURN|FOB|cajole unusual dep|
+4675|119|10|6|1|1019.11|0.10|0.06|R|F|1994-03-18|1994-02-14|1994-04-17|NONE|SHIP|unts. caref|
+4676|165|2|1|47|50062.52|0.03|0.06|N|O|1995-12-20|1995-10-04|1996-01-09|NONE|AIR|lithely about the carefully special requ|
+4676|6|1|2|33|29898.00|0.08|0.05|N|O|1995-12-29|1995-10-01|1996-01-18|TAKE BACK RETURN|FOB|yly express |
+4676|146|3|3|4|4184.56|0.10|0.06|N|O|1995-12-12|1995-10-22|1995-12-13|TAKE BACK RETURN|TRUCK|detect above the ironic platelets. fluffily|
+4676|111|2|4|50|50555.50|0.07|0.01|N|O|1995-09-20|1995-11-20|1995-10-18|TAKE BACK RETURN|AIR|r deposits boost boldly quickly quick asymp|
+4676|122|7|5|29|29641.48|0.01|0.02|N|O|1995-12-29|1995-11-12|1996-01-06|TAKE BACK RETURN|RAIL|ly regular theodolites sleep.|
+4676|46|7|6|8|7568.32|0.08|0.08|N|O|1995-12-05|1995-10-18|1996-01-02|COLLECT COD|AIR|cuses boost above|
+4676|64|1|7|13|12532.78|0.05|0.07|N|O|1995-11-18|1995-11-07|1995-12-10|TAKE BACK RETURN|TRUCK| at the slyly bold attainments. silently e|
+4677|128|3|1|25|25703.00|0.04|0.04|N|O|1998-04-11|1998-05-11|1998-04-18|TAKE BACK RETURN|REG AIR|unts doubt furiousl|
+4678|58|6|1|35|33531.75|0.04|0.08|N|O|1998-11-27|1998-10-02|1998-12-17|TAKE BACK RETURN|AIR|he accounts. fluffily bold sheaves b|
+4678|117|1|2|18|18307.98|0.03|0.06|N|O|1998-10-30|1998-09-22|1998-11-25|TAKE BACK RETURN|SHIP|usly ironic |
+4678|96|9|3|13|12949.17|0.10|0.07|N|O|1998-11-03|1998-10-17|1998-11-06|TAKE BACK RETURN|SHIP|its. carefully final fr|
+4678|22|1|4|23|21206.46|0.06|0.05|N|O|1998-09-03|1998-09-20|1998-09-04|DELIVER IN PERSON|SHIP|ily sly deposi|
+4678|178|9|5|40|43126.80|0.03|0.07|N|O|1998-11-11|1998-10-27|1998-11-24|TAKE BACK RETURN|AIR|. final, unusual requests sleep thinl|
+4679|190|1|1|7|7631.33|0.10|0.05|R|F|1993-05-11|1993-04-11|1993-05-16|NONE|TRUCK|kages. bold, regular packa|
+4704|78|6|1|14|13692.98|0.04|0.04|N|O|1996-10-27|1996-11-02|1996-11-07|DELIVER IN PERSON|TRUCK| above the slyly final requests. quickly |
+4704|28|3|2|7|6496.14|0.03|0.04|N|O|1996-12-04|1996-10-30|1996-12-23|DELIVER IN PERSON|SHIP|ers wake car|
+4704|64|5|3|44|42418.64|0.02|0.05|N|O|1996-09-02|1996-10-07|1996-09-17|DELIVER IN PERSON|REG AIR|out the care|
+4705|111|8|1|22|22244.42|0.04|0.04|R|F|1992-07-05|1992-05-11|1992-07-29|DELIVER IN PERSON|SHIP| fluffily pending accounts ca|
+4705|31|7|2|14|13034.42|0.00|0.08|R|F|1992-07-14|1992-05-23|1992-07-25|DELIVER IN PERSON|TRUCK|ain carefully amon|
+4705|56|1|3|16|15296.80|0.07|0.08|R|F|1992-07-02|1992-06-06|1992-07-06|DELIVER IN PERSON|RAIL|special ideas nag sl|
+4705|130|3|4|31|31934.03|0.03|0.03|R|F|1992-04-03|1992-05-30|1992-04-05|COLLECT COD|TRUCK|furiously final accou|
+4705|163|10|5|28|29768.48|0.10|0.01|A|F|1992-06-03|1992-06-07|1992-06-22|DELIVER IN PERSON|MAIL|tes wake according to the unusual plate|
+4705|184|5|6|23|24936.14|0.06|0.03|R|F|1992-06-22|1992-06-11|1992-07-18|DELIVER IN PERSON|MAIL| above the furiously ev|
+4705|89|10|7|40|39563.20|0.08|0.06|A|F|1992-04-19|1992-04-28|1992-05-07|COLLECT COD|TRUCK|blithely. sly|
+4706|182|3|1|37|40040.66|0.02|0.06|A|F|1993-02-20|1993-03-05|1993-03-03|DELIVER IN PERSON|TRUCK|kly final deposits c|
+4706|122|3|2|23|23508.76|0.03|0.01|A|F|1993-04-01|1993-03-13|1993-05-01|COLLECT COD|FOB|deas across t|
+4706|68|5|3|6|5808.36|0.01|0.04|R|F|1993-01-20|1993-03-18|1993-01-26|NONE|MAIL|efully eve|
+4706|116|10|4|5|5080.55|0.06|0.06|R|F|1993-02-14|1993-01-31|1993-02-26|NONE|REG AIR|ptotes haggle ca|
+4706|50|7|5|27|25651.35|0.06|0.08|A|F|1993-04-04|1993-03-11|1993-04-09|COLLECT COD|REG AIR|into beans. finally special instruct|
+4707|34|5|1|7|6538.21|0.02|0.05|R|F|1995-05-14|1995-04-06|1995-06-06|COLLECT COD|SHIP|ecial sheaves boost blithely accor|
+4707|136|7|2|49|50770.37|0.00|0.07|N|F|1995-06-17|1995-05-16|1995-06-25|COLLECT COD|FOB| alongside of the slyly ironic instructio|
+4708|191|4|1|18|19641.42|0.02|0.04|A|F|1994-11-11|1994-11-15|1994-11-26|NONE|REG AIR|special, eve|
+4708|75|3|2|5|4875.35|0.05|0.05|A|F|1994-10-15|1994-12-02|1994-11-12|COLLECT COD|MAIL|ely. carefully sp|
+4708|77|7|3|32|31266.24|0.04|0.07|A|F|1994-11-12|1994-11-14|1994-11-23|TAKE BACK RETURN|MAIL|the accounts. e|
+4709|25|6|1|25|23125.50|0.03|0.05|N|O|1996-02-21|1996-02-11|1996-03-17|DELIVER IN PERSON|AIR|deposits grow. fluffily unusual accounts |
+4709|177|5|2|25|26929.25|0.05|0.03|N|O|1996-01-22|1996-03-03|1996-02-21|DELIVER IN PERSON|REG AIR|inst the ironic, regul|
+4710|183|4|1|40|43327.20|0.10|0.08|A|F|1995-03-09|1995-02-25|1995-03-29|TAKE BACK RETURN|AIR|cross the blithely bold packages. silen|
+4710|128|3|2|47|48321.64|0.04|0.01|R|F|1995-02-22|1995-01-12|1995-02-28|NONE|RAIL|blithely express packages. even, ironic re|
+4711|133|4|1|7|7231.91|0.03|0.01|N|O|1998-05-12|1998-06-24|1998-05-24|COLLECT COD|MAIL|ly. bold accounts use fluff|
+4711|145|6|2|15|15677.10|0.08|0.07|N|O|1998-06-09|1998-07-30|1998-06-18|COLLECT COD|SHIP| beans wake. deposits could bo|
+4711|150|1|3|22|23103.30|0.02|0.03|N|O|1998-06-21|1998-06-18|1998-07-19|TAKE BACK RETURN|REG AIR|along the quickly careful packages. bli|
+4711|65|10|4|8|7720.48|0.07|0.00|N|O|1998-06-17|1998-06-13|1998-06-27|TAKE BACK RETURN|SHIP|g to the carefully ironic deposits. specia|
+4711|49|2|5|15|14235.60|0.05|0.01|N|O|1998-09-03|1998-07-15|1998-09-13|TAKE BACK RETURN|SHIP|ld requests: furiously final inst|
+4711|116|7|6|45|45724.95|0.05|0.06|N|O|1998-05-19|1998-07-14|1998-05-21|COLLECT COD|SHIP| ironic theodolites |
+4711|46|5|7|18|17028.72|0.05|0.04|N|O|1998-07-03|1998-07-31|1998-07-23|DELIVER IN PERSON|RAIL| blithely. bold asymptote|
+4736|196|10|1|26|28500.94|0.03|0.03|N|O|1996-02-02|1996-01-18|1996-02-09|DELIVER IN PERSON|AIR|efully speci|
+4736|4|1|2|43|38872.00|0.06|0.07|N|O|1996-02-05|1995-12-21|1996-02-06|COLLECT COD|MAIL|quests. carefully |
+4737|191|5|1|37|40374.03|0.03|0.04|R|F|1993-05-17|1993-04-10|1993-05-30|DELIVER IN PERSON|TRUCK|s. fluffily regular |
+4737|69|8|2|22|21319.32|0.04|0.04|A|F|1993-03-29|1993-05-22|1993-04-16|TAKE BACK RETURN|RAIL| hang fluffily around t|
+4738|187|8|1|9|9784.62|0.04|0.04|A|F|1992-06-01|1992-06-26|1992-06-02|COLLECT COD|TRUCK|posits serve slyly. unusual pint|
+4738|173|3|2|16|17170.72|0.07|0.08|A|F|1992-06-17|1992-06-20|1992-06-21|NONE|MAIL|nic deposits are slyly! carefu|
+4738|100|2|3|50|50005.00|0.04|0.02|A|F|1992-06-18|1992-07-04|1992-07-07|TAKE BACK RETURN|TRUCK|the blithely ironic braids sleep slyly|
+4738|29|4|4|22|20438.44|0.02|0.08|A|F|1992-05-25|1992-05-19|1992-06-12|COLLECT COD|SHIP|ld, even packages. furio|
+4738|187|8|5|13|14133.34|0.04|0.05|R|F|1992-05-30|1992-06-11|1992-06-26|COLLECT COD|AIR| wake. unusual platelets for the|
+4738|159|1|6|10|10591.50|0.10|0.01|R|F|1992-07-10|1992-06-16|1992-07-25|TAKE BACK RETURN|SHIP|hins above the|
+4738|83|4|7|28|27526.24|0.05|0.07|A|F|1992-06-09|1992-07-05|1992-06-25|NONE|AIR|e furiously ironic excuses. care|
+4739|168|9|1|8|8545.28|0.07|0.07|R|F|1993-06-22|1993-05-10|1993-07-11|TAKE BACK RETURN|SHIP|cording to the |
+4739|185|6|2|31|33640.58|0.09|0.06|R|F|1993-06-20|1993-05-18|1993-06-26|COLLECT COD|SHIP|blithely special pin|
+4739|100|4|3|30|30003.00|0.09|0.00|A|F|1993-05-29|1993-04-12|1993-06-18|NONE|TRUCK|ly even packages use across th|
+4740|3|4|1|22|19866.00|0.06|0.01|N|O|1996-10-04|1996-08-17|1996-10-05|TAKE BACK RETURN|RAIL|final dependencies nag |
+4740|153|5|2|24|25275.60|0.08|0.02|N|O|1996-09-10|1996-09-27|1996-10-07|TAKE BACK RETURN|TRUCK|hely regular deposits|
+4741|73|2|1|24|23353.68|0.00|0.01|A|F|1992-09-16|1992-09-19|1992-09-20|DELIVER IN PERSON|RAIL|deas boost furiously slyly regular id|
+4741|113|4|2|16|16209.76|0.01|0.07|R|F|1992-08-25|1992-08-10|1992-08-29|TAKE BACK RETURN|FOB|final foxes haggle r|
+4741|156|8|3|24|25347.60|0.05|0.08|A|F|1992-11-04|1992-08-14|1992-11-06|TAKE BACK RETURN|MAIL|even requests.|
+4741|51|3|4|39|37090.95|0.09|0.06|R|F|1992-10-28|1992-10-03|1992-11-11|COLLECT COD|SHIP|t, regular requests|
+4741|179|10|5|40|43166.80|0.09|0.03|R|F|1992-09-20|1992-09-23|1992-10-09|TAKE BACK RETURN|REG AIR| fluffily slow deposits. fluffily regu|
+4741|157|5|6|34|35943.10|0.02|0.07|R|F|1992-08-25|1992-08-18|1992-09-20|DELIVER IN PERSON|RAIL|sly special packages after the furiously|
+4742|156|4|1|32|33796.80|0.10|0.08|R|F|1995-04-04|1995-06-12|1995-04-19|COLLECT COD|RAIL|eposits boost blithely. carefully regular a|
+4742|155|7|2|29|30599.35|0.02|0.03|N|F|1995-06-15|1995-05-05|1995-06-24|COLLECT COD|REG AIR|integrate closely among t|
+4742|72|10|3|15|14581.05|0.06|0.04|N|O|1995-07-20|1995-05-26|1995-08-11|NONE|SHIP|terns are sl|
+4742|188|9|4|31|33733.58|0.05|0.08|N|F|1995-06-13|1995-05-08|1995-06-24|COLLECT COD|REG AIR|ke slyly among the furiousl|
+4742|100|1|5|45|45004.50|0.05|0.00|R|F|1995-05-12|1995-05-14|1995-06-07|TAKE BACK RETURN|RAIL|ke carefully. do|
+4743|60|5|1|19|18241.14|0.04|0.07|A|F|1993-06-23|1993-05-03|1993-07-20|COLLECT COD|AIR|hely even accounts|
+4743|159|4|2|3|3177.45|0.01|0.03|R|F|1993-04-14|1993-06-08|1993-05-09|NONE|TRUCK|al requests. express idea|
+4743|73|2|3|21|20434.47|0.08|0.03|A|F|1993-07-02|1993-06-15|1993-07-26|DELIVER IN PERSON|RAIL|ake blithely against the packages. reg|
+4743|34|5|4|27|25218.81|0.08|0.05|R|F|1993-07-26|1993-05-27|1993-08-24|DELIVER IN PERSON|AIR|aids use. express deposits|
+4768|36|7|1|5|4680.15|0.00|0.03|R|F|1993-12-27|1994-02-09|1994-01-11|NONE|MAIL|egular accounts. bravely final fra|
+4769|35|1|1|16|14960.48|0.08|0.05|N|O|1995-07-16|1995-07-05|1995-07-22|TAKE BACK RETURN|FOB| deposits. slyly even asymptote|
+4769|63|8|2|34|32744.04|0.06|0.07|N|O|1995-07-26|1995-05-18|1995-08-03|COLLECT COD|REG AIR|ven instructions. ca|
+4769|47|10|3|36|34093.44|0.10|0.03|N|O|1995-07-22|1995-06-16|1995-08-11|NONE|RAIL|. slyly even deposit|
+4769|69|10|4|45|43607.70|0.08|0.06|R|F|1995-06-01|1995-07-13|1995-06-04|TAKE BACK RETURN|RAIL|accounts are. even accounts sleep|
+4769|112|6|5|15|15181.65|0.07|0.08|N|F|1995-06-12|1995-07-07|1995-07-04|NONE|SHIP|egular platelets can cajole across the |
+4770|32|8|1|41|38213.23|0.00|0.08|N|O|1995-09-04|1995-08-08|1995-09-10|COLLECT COD|FOB|ithely even packages sleep caref|
+4770|157|5|2|30|31714.50|0.09|0.07|N|O|1995-08-25|1995-08-27|1995-09-07|COLLECT COD|SHIP|ffily carefully ironic ideas. ironic d|
+4771|49|10|1|9|8541.36|0.01|0.00|R|F|1993-02-28|1993-02-19|1993-03-25|NONE|FOB|riously after the packages. fina|
+4771|16|7|2|21|19236.21|0.09|0.01|R|F|1993-01-19|1993-02-10|1993-02-01|NONE|FOB|fluffily pendi|
+4771|12|3|3|5|4560.05|0.06|0.08|R|F|1993-01-07|1993-01-19|1993-01-26|NONE|RAIL|ar, quiet accounts nag furiously express id|
+4771|9|4|4|21|19089.00|0.05|0.04|A|F|1992-12-20|1993-01-22|1992-12-26|TAKE BACK RETURN|SHIP| carefully re|
+4772|87|8|1|1|987.08|0.10|0.00|R|F|1994-11-13|1994-10-25|1994-11-15|DELIVER IN PERSON|AIR|ans. slyly even acc|
+4772|146|9|2|16|16738.24|0.07|0.06|R|F|1994-10-27|1994-12-07|1994-10-29|TAKE BACK RETURN|MAIL|egular accounts wake s|
+4772|95|6|3|31|30847.79|0.02|0.04|A|F|1994-10-02|1994-10-21|1994-10-13|TAKE BACK RETURN|FOB|ests are thinly. furiously unusua|
+4772|71|10|4|15|14566.05|0.02|0.07|R|F|1994-09-19|1994-10-22|1994-09-26|COLLECT COD|TRUCK| requests. express, regular th|
+4773|144|5|1|23|24015.22|0.00|0.08|N|O|1996-01-01|1996-03-19|1996-01-04|NONE|FOB|ly express grouches wak|
+4773|197|9|2|36|39498.84|0.09|0.04|N|O|1996-04-08|1996-03-03|1996-05-01|COLLECT COD|REG AIR| dependencies. quickly|
+4773|167|8|3|49|52290.84|0.05|0.02|N|O|1996-01-26|1996-02-29|1996-01-27|TAKE BACK RETURN|FOB|y final reque|
+4773|20|10|4|49|45080.98|0.09|0.04|N|O|1996-01-12|1996-02-17|1996-02-05|TAKE BACK RETURN|TRUCK|ly pending theodolites cajole caref|
+4773|150|3|5|20|21003.00|0.02|0.07|N|O|1995-12-28|1996-02-17|1996-01-15|COLLECT COD|TRUCK| blithely final deposits nag after t|
+4773|190|1|6|11|11992.09|0.10|0.06|N|O|1996-01-02|1996-01-29|1996-01-24|DELIVER IN PERSON|REG AIR|en accounts. slyly b|
+4773|158|3|7|6|6348.90|0.07|0.01|N|O|1996-03-09|1996-03-18|1996-03-27|NONE|AIR|latelets haggle s|
+4774|84|5|1|45|44283.60|0.10|0.00|R|F|1993-07-07|1993-06-08|1993-07-31|COLLECT COD|TRUCK| haggle busily afte|
+4774|39|5|2|4|3756.12|0.02|0.03|A|F|1993-08-03|1993-05-30|1993-08-19|COLLECT COD|FOB|xes according to the foxes wake above the f|
+4774|173|4|3|47|50438.99|0.10|0.08|R|F|1993-06-13|1993-07-04|1993-07-09|TAKE BACK RETURN|FOB|regular dolphins above the furi|
+4774|130|3|4|30|30903.90|0.05|0.08|A|F|1993-08-18|1993-06-08|1993-08-21|DELIVER IN PERSON|REG AIR|tions against the blithely final theodolit|
+4775|74|4|1|1|974.07|0.10|0.02|N|O|1995-09-06|1995-09-28|1995-09-29|DELIVER IN PERSON|MAIL|furiously ironic theodolite|
+4775|153|1|2|37|38966.55|0.02|0.01|N|O|1995-09-06|1995-09-28|1995-09-28|COLLECT COD|TRUCK|ts. pinto beans use according to th|
+4775|153|5|3|34|35807.10|0.09|0.06|N|O|1995-09-14|1995-10-15|1995-09-21|DELIVER IN PERSON|MAIL|onic epitaphs. f|
+4775|119|9|4|39|39745.29|0.07|0.04|N|O|1995-08-30|1995-10-12|1995-09-20|NONE|AIR|eep never with the slyly regular acc|
+4800|97|10|1|11|10967.99|0.03|0.03|R|F|1992-01-27|1992-03-16|1992-02-19|TAKE BACK RETURN|RAIL|ic dependenc|
+4800|26|5|2|1|926.02|0.06|0.06|A|F|1992-02-23|1992-03-16|1992-03-20|TAKE BACK RETURN|MAIL|nal accounts are blithely deposits. bol|
+4800|11|8|3|21|19131.21|0.09|0.05|A|F|1992-02-14|1992-03-15|1992-02-26|NONE|SHIP|ithely according to |
+4800|176|7|4|38|40894.46|0.10|0.08|R|F|1992-02-01|1992-02-28|1992-02-21|NONE|TRUCK|s sleep fluffily. furiou|
+4800|53|4|5|24|22873.20|0.08|0.04|R|F|1992-01-14|1992-02-23|1992-01-25|NONE|TRUCK|ully carefully r|
+4801|184|5|1|37|40114.66|0.10|0.02|N|O|1996-03-09|1996-02-29|1996-03-25|TAKE BACK RETURN|FOB|uests hinder blithely against the instr|
+4801|26|1|2|34|31484.68|0.03|0.02|N|O|1996-02-05|1996-04-16|1996-02-23|NONE|SHIP|y final requests |
+4801|110|1|3|4|4040.44|0.04|0.04|N|O|1996-03-23|1996-04-04|1996-03-25|COLLECT COD|RAIL|pitaphs. regular, reg|
+4801|92|3|4|39|38691.51|0.07|0.01|N|O|1996-03-19|1996-03-21|1996-04-17|TAKE BACK RETURN|REG AIR|warhorses wake never for the care|
+4802|40|1|1|6|5640.24|0.00|0.06|N|O|1997-04-16|1997-03-25|1997-04-21|TAKE BACK RETURN|SHIP|unusual accounts wake blithely. b|
+4803|132|3|1|2|2064.26|0.08|0.03|N|O|1996-04-16|1996-03-20|1996-05-15|NONE|REG AIR|gular reque|
+4803|176|4|2|47|50579.99|0.10|0.00|N|O|1996-03-14|1996-03-30|1996-03-15|DELIVER IN PERSON|FOB|ly final excuses. slyly express requ|
+4803|196|8|3|42|46039.98|0.04|0.08|N|O|1996-04-27|1996-05-05|1996-05-17|NONE|TRUCK| accounts affix quickly ar|
+4803|22|1|4|24|22128.48|0.10|0.04|N|O|1996-02-24|1996-04-02|1996-02-28|NONE|MAIL|t blithely slyly special decoys. |
+4803|189|10|5|21|22872.78|0.03|0.06|N|O|1996-05-25|1996-03-15|1996-06-09|COLLECT COD|FOB| silent packages use. b|
+4803|194|5|6|19|20789.61|0.07|0.00|N|O|1996-04-20|1996-03-25|1996-04-27|TAKE BACK RETURN|RAIL|sts. enticing, even|
+4804|128|1|1|44|45237.28|0.06|0.08|A|F|1992-05-02|1992-03-24|1992-05-28|TAKE BACK RETURN|AIR|aggle quickly among the slyly fi|
+4804|35|6|2|41|38336.23|0.10|0.02|R|F|1992-04-06|1992-04-12|1992-05-03|COLLECT COD|MAIL|. deposits haggle express tithes?|
+4804|65|2|3|33|31846.98|0.09|0.05|A|F|1992-03-02|1992-04-14|1992-03-13|DELIVER IN PERSON|AIR|, thin excuses. |
+4805|150|1|1|7|7351.05|0.09|0.03|A|F|1992-05-01|1992-07-09|1992-05-09|NONE|FOB| requests. regular deposit|
+4805|189|10|2|45|49013.10|0.02|0.03|R|F|1992-06-16|1992-06-08|1992-07-03|NONE|TRUCK|the furiously sly t|
+4805|154|6|3|44|46382.60|0.01|0.02|R|F|1992-05-14|1992-06-23|1992-05-25|DELIVER IN PERSON|SHIP|eposits sleep furiously qui|
+4805|65|2|4|13|12545.78|0.04|0.04|R|F|1992-07-16|1992-06-07|1992-08-10|COLLECT COD|AIR|its serve about the accounts. slyly regu|
+4805|9|10|5|42|38178.00|0.03|0.03|R|F|1992-08-17|1992-07-03|1992-09-14|NONE|REG AIR|the regular, fina|
+4805|136|7|6|18|18650.34|0.06|0.04|A|F|1992-06-07|1992-07-10|1992-06-12|COLLECT COD|TRUCK|o use pending, unusu|
+4806|16|7|1|26|23816.26|0.10|0.05|R|F|1993-05-28|1993-06-07|1993-05-29|DELIVER IN PERSON|SHIP| bold pearls sublate blithely. quickly pe|
+4806|72|10|2|6|5832.42|0.01|0.06|A|F|1993-05-17|1993-07-19|1993-05-29|TAKE BACK RETURN|SHIP|even theodolites. packages sl|
+4806|29|4|3|8|7432.16|0.09|0.00|A|F|1993-05-08|1993-07-16|1993-05-28|NONE|TRUCK|requests boost blithely. qui|
+4807|122|1|1|9|9199.08|0.04|0.08|N|O|1997-04-23|1997-03-01|1997-05-15|TAKE BACK RETURN|TRUCK|may are blithely. carefully even pinto b|
+4807|10|1|2|41|37310.41|0.07|0.08|N|O|1997-05-02|1997-03-31|1997-05-15|TAKE BACK RETURN|AIR| fluffily re|
+4807|145|6|3|34|35534.76|0.06|0.02|N|O|1997-01-31|1997-03-13|1997-02-01|NONE|SHIP|ecial ideas. deposits according to the fin|
+4807|190|1|4|32|34886.08|0.05|0.00|N|O|1997-04-04|1997-03-21|1997-04-16|NONE|RAIL|efully even dolphins slee|
+4807|159|1|5|2|2118.30|0.02|0.05|N|O|1997-05-09|1997-04-03|1997-06-05|TAKE BACK RETURN|RAIL|deas wake bli|
+4807|160|1|6|22|23323.52|0.09|0.06|N|O|1997-03-13|1997-02-23|1997-04-01|NONE|FOB|es use final excuses. furiously final|
+4832|15|6|1|23|21045.23|0.03|0.01|N|O|1997-12-05|1998-01-05|1997-12-10|NONE|RAIL|y express depo|
+4832|152|4|2|10|10521.50|0.00|0.06|N|O|1998-01-08|1998-02-01|1998-01-11|DELIVER IN PERSON|MAIL|ly. blithely bold pinto beans should have|
+4832|149|6|3|4|4196.56|0.04|0.01|N|O|1998-01-16|1998-02-12|1998-02-08|TAKE BACK RETURN|AIR|ages. slyly express deposits cajole car|
+4832|64|5|4|6|5784.36|0.02|0.01|N|O|1997-12-08|1998-02-03|1997-12-10|COLLECT COD|TRUCK|ages cajole after the bold requests. furi|
+4832|138|4|5|43|44639.59|0.10|0.08|N|O|1997-12-31|1998-02-20|1998-01-26|COLLECT COD|RAIL|oze according to the accou|
+4833|107|10|1|31|31220.10|0.08|0.04|N|O|1996-06-24|1996-07-15|1996-07-02|NONE|SHIP|ven instructions cajole against the caref|
+4833|117|7|2|11|11188.21|0.03|0.01|N|O|1996-08-24|1996-07-26|1996-09-19|NONE|REG AIR|s nag above the busily sile|
+4833|18|9|3|26|23868.26|0.08|0.04|N|O|1996-05-13|1996-07-12|1996-05-31|NONE|SHIP|s packages. even gif|
+4833|36|7|4|19|17784.57|0.07|0.07|N|O|1996-08-21|1996-07-09|1996-09-10|TAKE BACK RETURN|AIR|y quick theodolit|
+4833|35|1|5|4|3740.12|0.10|0.02|N|O|1996-08-16|1996-06-29|1996-08-22|NONE|AIR|y pending packages sleep blithely regular r|
+4834|183|4|1|27|29245.86|0.06|0.02|N|O|1997-01-09|1996-10-27|1997-01-27|DELIVER IN PERSON|RAIL|es nag blithe|
+4834|71|1|2|26|25247.82|0.01|0.00|N|O|1996-10-04|1996-10-21|1996-10-10|DELIVER IN PERSON|TRUCK|ages dazzle carefully. slyly daring foxes|
+4834|23|2|3|34|31382.68|0.03|0.01|N|O|1996-12-09|1996-11-26|1996-12-10|NONE|MAIL|ounts haggle bo|
+4834|143|10|4|38|39639.32|0.03|0.06|N|O|1997-01-10|1996-12-06|1997-01-22|COLLECT COD|FOB|alongside of the carefully even plate|
+4835|179|10|1|18|19425.06|0.00|0.03|R|F|1995-02-17|1994-12-14|1995-03-17|DELIVER IN PERSON|MAIL|eat furiously against the slyly |
+4835|91|3|2|3|2973.27|0.09|0.06|R|F|1995-01-24|1995-01-12|1995-02-16|COLLECT COD|AIR|etimes final pac|
+4835|86|7|3|27|26624.16|0.05|0.00|A|F|1994-12-10|1994-12-13|1995-01-02|DELIVER IN PERSON|REG AIR| accounts after the car|
+4835|102|7|4|23|23048.30|0.08|0.07|A|F|1995-02-05|1995-01-04|1995-02-28|NONE|SHIP|e carefully regular foxes. deposits are sly|
+4836|162|1|1|22|23367.52|0.01|0.03|N|O|1997-03-03|1997-02-23|1997-03-04|NONE|SHIP|al pinto beans. care|
+4836|48|5|2|16|15168.64|0.07|0.08|N|O|1997-01-14|1997-03-05|1997-01-30|COLLECT COD|MAIL|gular packages against the express reque|
+4836|76|4|3|14|13664.98|0.03|0.08|N|O|1997-02-21|1997-02-06|1997-03-08|COLLECT COD|MAIL|lites. unusual, bold dolphins ar|
+4836|106|1|4|15|15091.50|0.10|0.00|N|O|1997-03-08|1997-03-14|1997-03-30|TAKE BACK RETURN|TRUCK|eep slyly. even requests cajole|
+4836|51|6|5|12|11412.60|0.01|0.04|N|O|1997-02-02|1997-02-10|1997-02-03|COLLECT COD|TRUCK|sly ironic accoun|
+4837|42|1|1|16|15072.64|0.09|0.04|N|O|1998-08-12|1998-06-06|1998-08-26|COLLECT COD|FOB|ing requests are blithely regular instructi|
+4837|193|5|2|16|17491.04|0.01|0.02|N|O|1998-08-19|1998-06-18|1998-08-26|NONE|RAIL|counts cajole slyly furiou|
+4837|68|5|3|42|40658.52|0.10|0.00|N|O|1998-06-19|1998-07-06|1998-06-23|COLLECT COD|MAIL|o the furiously final theodolites boost|
+4838|122|3|1|35|35774.20|0.01|0.00|R|F|1992-10-30|1992-10-23|1992-11-21|TAKE BACK RETURN|RAIL|ly blithely unusual foxes. even package|
+4838|148|5|2|2|2096.28|0.03|0.08|R|F|1992-08-11|1992-09-16|1992-08-26|COLLECT COD|MAIL|hely final notornis are furiously blithe|
+4838|52|3|3|26|24753.30|0.06|0.04|R|F|1992-09-03|1992-10-25|1992-09-11|TAKE BACK RETURN|FOB|ular requests boost about the packages. r|
+4839|60|2|1|5|4800.30|0.10|0.07|A|F|1994-09-07|1994-07-15|1994-10-05|DELIVER IN PERSON|FOB|ses integrate. regular deposits are about |
+4839|10|1|2|25|22750.25|0.02|0.02|R|F|1994-05-20|1994-07-08|1994-05-30|NONE|REG AIR|regular packages ab|
+4839|60|1|3|18|17281.08|0.06|0.01|R|F|1994-05-18|1994-06-13|1994-06-09|TAKE BACK RETURN|FOB|blithely ironic theodolites use along|
+4839|100|1|4|19|19001.90|0.07|0.08|R|F|1994-05-20|1994-07-14|1994-05-30|NONE|REG AIR| deposits sublate furiously ir|
+4839|71|10|5|9|8739.63|0.05|0.01|R|F|1994-06-17|1994-06-18|1994-07-10|NONE|SHIP|ounts haggle carefully above|
+4864|150|9|1|28|29404.20|0.06|0.08|A|F|1993-02-06|1992-12-15|1993-02-10|COLLECT COD|REG AIR|thely around the bli|
+4864|38|4|2|38|35645.14|0.10|0.02|R|F|1992-12-20|1993-01-07|1993-01-06|TAKE BACK RETURN|SHIP|ording to the ironic, ir|
+4864|133|4|3|45|46490.85|0.02|0.01|A|F|1992-11-17|1993-01-02|1992-11-26|COLLECT COD|SHIP|round the furiously careful pa|
+4864|31|2|4|46|42827.38|0.07|0.03|A|F|1993-02-24|1993-01-02|1993-03-17|TAKE BACK RETURN|RAIL|sts use carefully across the carefull|
+4865|162|7|1|16|16994.56|0.07|0.05|N|O|1997-10-02|1997-08-20|1997-10-04|COLLECT COD|TRUCK|osits haggle. fur|
+4865|137|8|2|4|4148.52|0.07|0.01|N|O|1997-07-24|1997-07-25|1997-08-07|TAKE BACK RETURN|FOB|sts. blithely special instruction|
+4865|68|3|3|44|42594.64|0.10|0.08|N|O|1997-07-25|1997-08-20|1997-08-22|COLLECT COD|FOB|even deposits sleep against the quickly r|
+4865|50|3|4|21|19951.05|0.04|0.02|N|O|1997-07-17|1997-08-10|1997-07-21|NONE|RAIL|eposits detect sly|
+4865|54|9|5|33|31483.65|0.00|0.05|N|O|1997-07-17|1997-08-16|1997-07-30|TAKE BACK RETURN|FOB|y pending notornis ab|
+4865|65|2|6|47|45357.82|0.00|0.05|N|O|1997-08-26|1997-08-07|1997-08-31|NONE|RAIL|y unusual packages. packages|
+4866|11|8|1|9|8199.09|0.01|0.05|N|O|1997-08-30|1997-09-18|1997-09-24|TAKE BACK RETURN|MAIL|ven dependencies x-ray. quic|
+4866|102|3|2|1|1002.10|0.06|0.00|N|O|1997-10-15|1997-10-01|1997-11-14|TAKE BACK RETURN|AIR|latelets nag. q|
+4866|131|7|3|17|17529.21|0.07|0.00|N|O|1997-11-26|1997-10-11|1997-12-12|COLLECT COD|TRUCK|ess packages doubt. even somas wake f|
+4867|82|3|1|7|6874.56|0.09|0.03|A|F|1992-07-17|1992-08-17|1992-07-22|COLLECT COD|FOB|e carefully even packages. slyly ironic i|
+4867|160|8|2|3|3180.48|0.04|0.08|R|F|1992-07-04|1992-07-15|1992-07-21|NONE|AIR|yly silent deposits|
+4868|73|3|1|47|45734.29|0.03|0.03|N|O|1997-04-29|1997-04-27|1997-05-11|DELIVER IN PERSON|SHIP|gle unusual, fluffy packages. foxes cajol|
+4868|180|1|2|8|8641.44|0.10|0.08|N|O|1997-03-26|1997-05-09|1997-04-16|NONE|RAIL|ly special th|
+4868|191|2|3|49|53468.31|0.09|0.03|N|O|1997-04-23|1997-05-07|1997-04-26|NONE|SHIP|ys engage. th|
+4868|80|1|4|34|33322.72|0.04|0.02|N|O|1997-05-19|1997-04-27|1997-06-15|NONE|RAIL|en instructions about th|
+4868|122|3|5|22|22486.64|0.07|0.06|N|O|1997-04-26|1997-05-16|1997-05-01|DELIVER IN PERSON|FOB|osits. final foxes boost regular,|
+4869|41|8|1|31|29172.24|0.10|0.01|A|F|1995-01-17|1994-11-30|1995-02-02|NONE|SHIP|ins. always unusual ideas across the ir|
+4869|58|3|2|24|22993.20|0.09|0.06|A|F|1994-11-17|1994-11-07|1994-11-27|COLLECT COD|MAIL|olites cajole after the ideas. special t|
+4869|157|8|3|25|26428.75|0.00|0.05|R|F|1994-11-25|1994-11-14|1994-12-19|DELIVER IN PERSON|AIR|e according t|
+4869|103|8|4|24|24074.40|0.10|0.07|R|F|1994-11-23|1994-11-18|1994-12-11|DELIVER IN PERSON|MAIL|se deposits above the sly, q|
+4869|173|2|5|42|45073.14|0.07|0.04|R|F|1994-10-16|1994-12-10|1994-11-07|TAKE BACK RETURN|REG AIR| slyly even instructions. |
+4869|122|5|6|30|30663.60|0.00|0.05|A|F|1995-01-09|1994-11-20|1995-02-02|COLLECT COD|RAIL|gedly even requests. s|
+4870|48|5|1|49|46453.96|0.05|0.05|R|F|1994-11-14|1994-10-24|1994-12-12|TAKE BACK RETURN|SHIP| regular packages |
+4870|127|10|2|6|6162.72|0.06|0.08|A|F|1994-09-09|1994-10-16|1994-09-21|DELIVER IN PERSON|TRUCK|ress requests. bold, silent pinto bea|
+4870|31|2|3|5|4655.15|0.05|0.00|R|F|1994-10-11|1994-10-07|1994-10-24|NONE|AIR|s haggle furiously. slyly ironic dinos|
+4870|6|9|4|4|3624.00|0.03|0.08|A|F|1994-10-23|1994-09-16|1994-11-04|COLLECT COD|RAIL|its wake quickly. slyly quick|
+4870|71|1|5|36|34958.52|0.09|0.06|A|F|1994-09-06|1994-09-17|1994-10-01|COLLECT COD|REG AIR| instructions. carefully pending pac|
+4871|177|5|1|14|15080.38|0.07|0.03|N|O|1995-09-30|1995-07-29|1995-10-18|TAKE BACK RETURN|REG AIR|inst the never ironic |
+4871|161|6|2|17|18039.72|0.07|0.03|N|O|1995-09-09|1995-09-01|1995-10-02|DELIVER IN PERSON|AIR|es. carefully ev|
+4871|63|4|3|3|2889.18|0.03|0.06|N|O|1995-10-03|1995-08-10|1995-10-06|DELIVER IN PERSON|TRUCK|y special packages wak|
+4871|149|8|4|35|36719.90|0.08|0.07|N|O|1995-08-11|1995-07-18|1995-08-29|DELIVER IN PERSON|TRUCK|ackages sle|
+4871|152|3|5|10|10521.50|0.09|0.02|N|O|1995-09-12|1995-09-02|1995-10-05|TAKE BACK RETURN|AIR|s integrate after the a|
+4871|136|2|6|36|37300.68|0.02|0.08|N|O|1995-09-18|1995-08-29|1995-10-05|TAKE BACK RETURN|AIR|ely according|
+4871|140|6|7|10|10401.40|0.10|0.02|N|O|1995-07-13|1995-08-19|1995-07-29|NONE|REG AIR|p ironic theodolites. slyly even platel|
+4896|41|2|1|19|17879.76|0.09|0.05|A|F|1992-12-13|1992-11-13|1993-01-09|NONE|AIR|nusual requ|
+4896|140|1|2|44|45766.16|0.04|0.03|A|F|1992-11-24|1992-11-15|1992-12-18|COLLECT COD|MAIL|e after the slowly f|
+4896|58|10|3|6|5748.30|0.04|0.04|A|F|1992-10-30|1992-11-12|1992-11-28|DELIVER IN PERSON|TRUCK|usly regular deposits|
+4896|23|4|4|5|4615.10|0.08|0.02|R|F|1992-12-02|1992-11-11|1992-12-19|COLLECT COD|SHIP|eposits hang carefully. sly|
+4896|86|7|5|21|20707.68|0.07|0.08|R|F|1992-11-18|1992-11-18|1992-11-29|DELIVER IN PERSON|TRUCK|ly express deposits. carefully pending depo|
+4897|55|6|1|26|24831.30|0.01|0.01|R|F|1992-12-22|1992-10-25|1992-12-27|DELIVER IN PERSON|TRUCK|. carefully ironic dep|
+4897|143|6|2|34|35466.76|0.02|0.00|R|F|1992-12-31|1992-11-11|1993-01-30|COLLECT COD|AIR|ts. special dependencies use fluffily |
+4897|55|7|3|42|40112.10|0.09|0.03|A|F|1992-09-23|1992-10-28|1992-10-02|DELIVER IN PERSON|FOB|sts. blithely regular deposits will have|
+4897|104|5|4|19|19077.90|0.03|0.00|A|F|1992-11-08|1992-12-14|1992-12-03|DELIVER IN PERSON|FOB|! ironic, pending dependencies doze furiou|
+4898|72|1|1|44|42771.08|0.07|0.02|A|F|1994-09-13|1994-08-18|1994-09-16|NONE|FOB|y regular grouches about|
+4899|34|10|1|14|13076.42|0.06|0.00|R|F|1993-11-10|1994-01-10|1993-11-20|NONE|REG AIR| foxes eat|
+4900|116|3|1|40|40644.40|0.10|0.03|A|F|1992-09-02|1992-09-25|1992-09-21|COLLECT COD|TRUCK|heodolites. request|
+4900|77|8|2|33|32243.31|0.06|0.06|R|F|1992-08-18|1992-09-20|1992-08-19|COLLECT COD|MAIL|nto beans nag slyly reg|
+4900|103|8|3|48|48148.80|0.02|0.00|R|F|1992-09-18|1992-08-14|1992-09-28|TAKE BACK RETURN|MAIL|uickly ironic ideas kindle s|
+4900|32|3|4|20|18640.60|0.05|0.00|R|F|1992-09-22|1992-09-23|1992-09-27|TAKE BACK RETURN|MAIL|yers. accounts affix somet|
+4900|105|8|5|40|40204.00|0.03|0.02|R|F|1992-07-14|1992-09-05|1992-07-20|NONE|REG AIR|luffily final dol|
+4900|103|6|6|46|46142.60|0.06|0.08|R|F|1992-07-11|1992-09-19|1992-07-16|TAKE BACK RETURN|SHIP|ly final acco|
+4901|141|10|1|37|38522.18|0.00|0.04|N|O|1998-01-26|1998-02-20|1998-01-31|DELIVER IN PERSON|TRUCK| furiously ev|
+4901|165|4|2|12|12781.92|0.00|0.04|N|O|1998-01-12|1998-02-06|1998-02-03|COLLECT COD|REG AIR|y unusual deposits prom|
+4901|120|4|3|16|16321.92|0.05|0.08|N|O|1998-04-19|1998-03-18|1998-04-21|NONE|AIR|deposits. blithely fin|
+4901|36|7|4|41|38377.23|0.03|0.00|N|O|1998-03-18|1998-02-18|1998-04-14|TAKE BACK RETURN|AIR|efully bold packages affix carefully eve|
+4901|116|7|5|40|40644.40|0.06|0.02|N|O|1998-01-08|1998-01-30|1998-01-15|DELIVER IN PERSON|MAIL|ect across the furiou|
+4902|196|10|1|22|24116.18|0.00|0.04|N|O|1998-10-17|1998-08-10|1998-10-21|COLLECT COD|RAIL|r the furiously final fox|
+4902|83|4|2|1|983.08|0.09|0.04|N|O|1998-10-12|1998-08-20|1998-11-08|NONE|RAIL|daring foxes? even, bold requests wake f|
+4903|121|2|1|1|1021.12|0.06|0.03|R|F|1992-04-23|1992-06-13|1992-05-03|NONE|SHIP|nusual requests|
+4903|165|6|2|6|6390.96|0.09|0.07|R|F|1992-04-01|1992-05-16|1992-04-11|DELIVER IN PERSON|SHIP|azzle quickly along the blithely final pla|
+4903|120|10|3|27|27543.24|0.07|0.06|A|F|1992-06-29|1992-06-09|1992-07-08|COLLECT COD|RAIL|pinto beans are; |
+4928|100|1|1|4|4000.40|0.04|0.02|R|F|1993-10-25|1993-12-24|1993-11-16|TAKE BACK RETURN|REG AIR|bout the slyly final accounts. carefull|
+4928|93|4|2|20|19861.80|0.03|0.08|A|F|1994-01-19|1993-11-29|1994-02-13|DELIVER IN PERSON|SHIP|quiet theodolites ca|
+4928|149|8|3|34|35670.76|0.06|0.05|A|F|1993-10-12|1993-12-31|1993-10-14|DELIVER IN PERSON|AIR|, regular depos|
+4929|14|1|1|20|18280.20|0.00|0.04|N|O|1996-03-12|1996-05-23|1996-03-20|COLLECT COD|REG AIR| final pinto beans detect. final,|
+4929|79|7|2|40|39162.80|0.08|0.03|N|O|1996-05-30|1996-04-13|1996-06-22|TAKE BACK RETURN|AIR|unts against |
+4929|77|7|3|32|31266.24|0.08|0.02|N|O|1996-04-28|1996-05-23|1996-04-30|COLLECT COD|TRUCK|usly at the blithely pending pl|
+4929|109|4|4|26|26236.60|0.00|0.05|N|O|1996-06-10|1996-05-29|1996-06-26|DELIVER IN PERSON|RAIL| slyly. fl|
+4929|67|8|5|24|23209.44|0.09|0.05|N|O|1996-04-15|1996-04-30|1996-05-09|NONE|MAIL| accounts boost|
+4930|187|8|1|35|38051.30|0.03|0.01|A|F|1994-07-09|1994-07-30|1994-07-15|NONE|RAIL|lose slyly regular dependencies. fur|
+4930|115|5|2|20|20302.20|0.02|0.04|A|F|1994-08-21|1994-06-17|1994-08-24|COLLECT COD|FOB|he carefully|
+4930|168|7|3|28|29908.48|0.00|0.08|R|F|1994-08-27|1994-06-27|1994-09-18|COLLECT COD|TRUCK|e ironic, unusual courts. regula|
+4930|166|7|4|42|44778.72|0.00|0.00|A|F|1994-06-18|1994-06-22|1994-07-10|COLLECT COD|AIR|ions haggle. furiously regular ideas use |
+4930|190|1|5|38|41427.22|0.02|0.03|A|F|1994-06-06|1994-06-18|1994-07-03|TAKE BACK RETURN|AIR|bold requests sleep never|
+4931|194|7|1|1|1094.19|0.08|0.06|A|F|1995-01-24|1994-12-19|1995-02-07|DELIVER IN PERSON|SHIP| furiously |
+4931|151|3|2|8|8409.20|0.06|0.02|R|F|1994-12-15|1995-01-14|1995-01-06|NONE|SHIP|ts boost. packages wake sly|
+4931|144|5|3|20|20882.80|0.09|0.00|A|F|1995-01-25|1994-12-21|1995-02-06|DELIVER IN PERSON|MAIL|the furious|
+4931|200|4|4|50|55010.00|0.04|0.01|A|F|1994-12-15|1994-12-18|1994-12-23|COLLECT COD|REG AIR|s haggle al|
+4931|150|7|5|25|26253.75|0.05|0.05|R|F|1994-12-19|1995-01-05|1994-12-21|COLLECT COD|FOB|aggle bravely according to the quic|
+4931|103|6|6|8|8024.80|0.02|0.03|A|F|1995-02-16|1994-12-30|1995-03-15|DELIVER IN PERSON|SHIP|dependencies are slyly|
+4932|51|3|1|13|12363.65|0.04|0.03|A|F|1993-09-13|1993-10-16|1993-09-20|DELIVER IN PERSON|SHIP|slyly according to the furiously fin|
+4932|103|10|2|15|15046.50|0.01|0.02|R|F|1993-11-15|1993-10-25|1993-11-29|NONE|REG AIR|yly. unusu|
+4932|87|8|3|5|4935.40|0.06|0.06|A|F|1993-10-01|1993-09-13|1993-10-04|NONE|MAIL| haggle furiously. slyly ironic packages sl|
+4932|98|1|4|11|10978.99|0.09|0.06|A|F|1993-09-21|1993-09-30|1993-09-23|COLLECT COD|SHIP|as. special depende|
+4933|32|8|1|48|44737.44|0.08|0.00|N|O|1995-10-10|1995-10-03|1995-11-04|COLLECT COD|SHIP|ideas. sly|
+4933|82|3|2|2|1964.16|0.09|0.00|N|O|1995-10-01|1995-09-29|1995-10-19|DELIVER IN PERSON|MAIL|ctions nag final instructions. accou|
+4934|97|10|1|48|47860.32|0.00|0.01|N|O|1997-05-20|1997-04-22|1997-06-02|TAKE BACK RETURN|SHIP| ideas cajol|
+4934|110|1|2|41|41414.51|0.06|0.06|N|O|1997-06-04|1997-04-11|1997-06-25|TAKE BACK RETURN|FOB|wake final, ironic f|
+4934|140|1|3|8|8321.12|0.03|0.06|N|O|1997-05-20|1997-04-30|1997-05-27|TAKE BACK RETURN|MAIL|arefully express pains cajo|
+4934|148|5|4|9|9433.26|0.06|0.08|N|O|1997-06-10|1997-04-09|1997-06-12|TAKE BACK RETURN|REG AIR| haggle alongside of the|
+4934|138|9|5|29|30105.77|0.09|0.03|N|O|1997-04-10|1997-05-05|1997-05-04|DELIVER IN PERSON|AIR|aggle furiously among the busily final re|
+4934|52|3|6|42|39986.10|0.00|0.07|N|O|1997-03-19|1997-05-05|1997-03-25|NONE|MAIL|ven, ironic ideas|
+4934|11|5|7|2|1822.02|0.10|0.06|N|O|1997-06-05|1997-03-26|1997-06-09|COLLECT COD|MAIL|ongside of the brave, regula|
+4935|161|2|1|13|13795.08|0.09|0.01|A|F|1993-06-20|1993-08-13|1993-06-27|COLLECT COD|REG AIR|ly requests. final deposits might |
+4935|40|6|2|37|34781.48|0.01|0.05|R|F|1993-08-30|1993-07-23|1993-09-07|TAKE BACK RETURN|RAIL|y even dependencies nag a|
+4935|11|8|3|24|21864.24|0.06|0.04|A|F|1993-05-29|1993-08-17|1993-06-22|NONE|RAIL|ly quickly s|
+4935|45|6|4|49|46306.96|0.06|0.01|A|F|1993-09-16|1993-08-21|1993-10-12|COLLECT COD|TRUCK|ffily after the furiou|
+4935|10|1|5|14|12740.14|0.08|0.08|A|F|1993-05-30|1993-07-25|1993-05-31|COLLECT COD|FOB|slowly. blith|
+4935|188|9|6|36|39174.48|0.10|0.00|R|F|1993-07-11|1993-07-04|1993-08-01|DELIVER IN PERSON|RAIL|requests across the quick|
+4960|18|5|1|36|33048.36|0.01|0.05|R|F|1995-03-06|1995-05-04|1995-04-05|TAKE BACK RETURN|RAIL|c, unusual accou|
+4960|45|8|2|6|5670.24|0.03|0.08|R|F|1995-03-21|1995-05-13|1995-04-14|TAKE BACK RETURN|SHIP|ual package|
+4960|149|8|3|9|9442.26|0.01|0.03|A|F|1995-03-20|1995-05-05|1995-04-17|COLLECT COD|RAIL|e blithely carefully fina|
+4960|120|7|4|14|14281.68|0.00|0.06|A|F|1995-04-03|1995-04-17|1995-04-07|NONE|RAIL|accounts. warhorses are. grouches |
+4960|98|1|5|8|7984.72|0.07|0.04|R|F|1995-03-14|1995-04-18|1995-04-09|NONE|FOB|as. busily regular packages nag. |
+4960|146|7|6|37|38707.18|0.10|0.04|R|F|1995-05-23|1995-04-12|1995-06-01|DELIVER IN PERSON|MAIL|ending theodolites w|
+4960|170|1|7|42|44947.14|0.08|0.07|A|F|1995-04-19|1995-04-11|1995-05-08|NONE|SHIP|s requests cajole. |
+4961|44|7|1|38|35873.52|0.10|0.07|N|O|1998-07-09|1998-06-03|1998-07-11|TAKE BACK RETURN|FOB|e on the blithely bold accounts. unu|
+4961|60|5|2|1|960.06|0.08|0.08|N|O|1998-07-08|1998-05-25|1998-07-12|DELIVER IN PERSON|MAIL|s affix carefully silent dependen|
+4961|162|3|3|41|43548.56|0.02|0.02|N|O|1998-07-15|1998-06-15|1998-08-05|TAKE BACK RETURN|REG AIR|ily against the n|
+4961|100|3|4|10|10001.00|0.02|0.04|N|O|1998-04-15|1998-07-03|1998-04-18|DELIVER IN PERSON|MAIL|quests. regular, ironic ideas at the ironi|
+4962|19|6|1|46|42274.46|0.01|0.07|R|F|1993-08-23|1993-09-04|1993-08-27|COLLECT COD|REG AIR| pinto beans grow about the sl|
+4963|168|5|1|38|40590.08|0.08|0.02|N|O|1996-12-25|1996-12-12|1997-01-02|COLLECT COD|AIR|tegrate daringly accou|
+4963|76|4|2|16|15617.12|0.00|0.03|N|O|1996-11-20|1997-01-13|1996-12-06|COLLECT COD|MAIL| carefully slyly u|
+4964|133|9|1|29|29960.77|0.04|0.01|N|O|1997-10-18|1997-08-30|1997-11-01|NONE|AIR|k accounts nag carefully-- ironic, fin|
+4964|148|5|2|46|48214.44|0.06|0.06|N|O|1997-10-05|1997-09-12|1997-10-11|NONE|TRUCK|althy deposits|
+4964|143|4|3|18|18776.52|0.00|0.06|N|O|1997-10-13|1997-09-01|1997-11-10|DELIVER IN PERSON|AIR| platelets. furio|
+4964|180|10|4|12|12962.16|0.08|0.01|N|O|1997-09-03|1997-10-25|1997-09-15|NONE|TRUCK|ully silent instructions ca|
+4964|41|10|5|42|39523.68|0.06|0.04|N|O|1997-09-04|1997-08-28|1997-10-02|TAKE BACK RETURN|AIR| hinder. idly even|
+4964|193|7|6|22|24050.18|0.04|0.08|N|O|1997-09-11|1997-10-06|1997-09-29|NONE|AIR|equests doubt quickly. caref|
+4964|173|4|7|28|30048.76|0.00|0.05|N|O|1997-08-30|1997-09-15|1997-09-18|COLLECT COD|RAIL|among the carefully regula|
+4965|131|2|1|28|28871.64|0.05|0.03|A|F|1994-01-02|1993-11-20|1994-01-04|TAKE BACK RETURN|REG AIR| deposits. requests sublate quickly |
+4965|13|10|2|25|22825.25|0.10|0.02|R|F|1994-02-05|1993-12-15|1994-02-24|TAKE BACK RETURN|MAIL|wake at the carefully speci|
+4965|101|8|3|27|27029.70|0.05|0.06|R|F|1993-11-06|1993-12-24|1993-11-30|TAKE BACK RETURN|SHIP|efully final foxes|
+4965|138|9|4|33|34258.29|0.04|0.04|A|F|1993-12-31|1993-11-29|1994-01-27|DELIVER IN PERSON|REG AIR|iously slyly|
+4966|76|6|1|10|9760.70|0.06|0.03|N|O|1996-09-23|1996-11-02|1996-10-07|TAKE BACK RETURN|SHIP| requests. carefully pending requests|
+4966|194|6|2|6|6565.14|0.02|0.01|N|O|1996-12-09|1996-11-29|1996-12-30|NONE|AIR|d deposits are sly excuses. slyly iro|
+4966|165|6|3|7|7456.12|0.00|0.01|N|O|1996-12-08|1996-10-09|1997-01-06|COLLECT COD|MAIL|ckly ironic tithe|
+4966|16|6|4|26|23816.26|0.08|0.03|N|O|1996-11-14|1996-11-29|1996-12-05|COLLECT COD|REG AIR|nt pearls haggle carefully slyly even |
+4966|144|1|5|12|12529.68|0.02|0.07|N|O|1996-12-07|1996-11-23|1996-12-20|DELIVER IN PERSON|RAIL|eodolites. ironic requests across the exp|
+4967|71|1|1|50|48553.50|0.07|0.01|N|O|1997-05-27|1997-05-13|1997-06-12|NONE|REG AIR|kages. final, unusual accounts c|
+4967|53|5|2|43|40981.15|0.00|0.07|N|O|1997-05-28|1997-04-10|1997-06-09|NONE|TRUCK|ons. slyly ironic requests|
+4967|50|1|3|15|14250.75|0.08|0.02|N|O|1997-04-16|1997-04-12|1997-05-08|TAKE BACK RETURN|MAIL|y. blithel|
+4967|123|2|4|1|1023.12|0.10|0.07|N|O|1997-06-04|1997-03-29|1997-06-23|NONE|FOB|osits. unusual frets thrash furiously|
+4992|184|5|1|42|45535.56|0.07|0.01|R|F|1992-07-19|1992-06-16|1992-08-17|TAKE BACK RETURN|RAIL|foxes about the quickly final platele|
+4992|147|4|2|47|49215.58|0.10|0.08|A|F|1992-09-04|1992-08-05|1992-09-21|COLLECT COD|MAIL|atterns use fluffily.|
+4992|144|7|3|17|17750.38|0.03|0.03|A|F|1992-07-05|1992-07-19|1992-07-30|TAKE BACK RETURN|FOB|s along the perma|
+4992|70|7|4|25|24251.75|0.04|0.06|R|F|1992-08-06|1992-07-11|1992-08-20|NONE|SHIP|ly about the never ironic requests. pe|
+4992|139|5|5|23|23899.99|0.01|0.08|R|F|1992-06-28|1992-07-15|1992-07-12|DELIVER IN PERSON|MAIL|uickly regul|
+4992|163|8|6|44|46779.04|0.05|0.02|A|F|1992-06-01|1992-07-22|1992-06-03|NONE|RAIL|rmanent, sly packages print slyly. regula|
+4993|38|4|1|34|31893.02|0.05|0.00|R|F|1994-09-21|1994-10-31|1994-09-24|TAKE BACK RETURN|REG AIR|ular, pending packages at the even packa|
+4993|129|4|2|39|40135.68|0.03|0.08|R|F|1994-09-10|1994-09-04|1994-09-26|COLLECT COD|SHIP|pending, regular requests solve caref|
+4993|166|1|3|42|44778.72|0.06|0.00|A|F|1994-08-27|1994-09-24|1994-09-05|NONE|MAIL| final packages at the q|
+4993|158|6|4|31|32802.65|0.10|0.06|A|F|1994-10-02|1994-10-29|1994-10-15|NONE|AIR|nwind thinly platelets. a|
+4994|156|8|1|36|38021.40|0.00|0.06|N|O|1996-09-29|1996-07-30|1996-10-03|TAKE BACK RETURN|TRUCK|ess ideas. blithely silent brai|
+4994|80|9|2|47|46063.76|0.04|0.05|N|O|1996-09-20|1996-08-04|1996-10-15|COLLECT COD|TRUCK|sts. blithely close ideas sleep quic|
+4994|183|4|3|29|31412.22|0.08|0.01|N|O|1996-08-26|1996-09-27|1996-09-25|DELIVER IN PERSON|RAIL|ptotes boost carefully|
+4994|39|10|4|40|37561.20|0.01|0.06|N|O|1996-08-25|1996-08-16|1996-09-07|TAKE BACK RETURN|REG AIR|eposits. regula|
+4994|42|9|5|24|22608.96|0.01|0.07|N|O|1996-08-19|1996-09-24|1996-08-25|TAKE BACK RETURN|FOB|s. slyly ironic deposits cajole f|
+4994|73|4|6|6|5838.42|0.01|0.02|N|O|1996-09-05|1996-08-04|1996-09-30|TAKE BACK RETURN|FOB|grate carefully around th|
+4994|130|1|7|31|31934.03|0.07|0.04|N|O|1996-10-14|1996-09-23|1996-11-08|TAKE BACK RETURN|RAIL|lar decoys cajole fluffil|
+4995|65|4|1|16|15440.96|0.02|0.05|N|O|1996-02-27|1996-04-03|1996-02-29|DELIVER IN PERSON|MAIL|egular, bold packages. accou|
+4995|81|2|2|43|42186.44|0.00|0.06|N|O|1996-02-24|1996-02-20|1996-03-07|NONE|AIR|ts. blithely silent ideas after t|
+4995|156|7|3|22|23235.30|0.03|0.06|N|O|1996-03-17|1996-03-12|1996-04-01|DELIVER IN PERSON|MAIL|s wake furious, express dependencies.|
+4995|40|1|4|9|8460.36|0.07|0.07|N|O|1996-03-07|1996-03-17|1996-03-11|DELIVER IN PERSON|FOB| ironic packages cajole across t|
+4995|148|7|5|48|50310.72|0.08|0.07|N|O|1996-03-22|1996-04-01|1996-04-07|NONE|SHIP|t blithely. requests affix blithely. |
+4995|110|5|6|48|48485.28|0.09|0.07|N|O|1996-04-14|1996-04-04|1996-05-07|DELIVER IN PERSON|RAIL|nstructions. carefully final depos|
+4996|56|1|1|35|33461.75|0.07|0.01|A|F|1992-10-30|1992-10-27|1992-11-05|TAKE BACK RETURN|SHIP|s. unusual, regular dolphins integrate care|
+4996|156|7|2|39|41189.85|0.02|0.07|A|F|1992-09-19|1992-10-19|1992-10-06|COLLECT COD|FOB|equests are carefully final|
+4996|128|7|3|12|12337.44|0.04|0.06|R|F|1993-01-09|1992-11-22|1993-02-04|DELIVER IN PERSON|SHIP|usly bold requests sleep dogge|
+4996|144|3|4|13|13573.82|0.00|0.00|A|F|1992-09-17|1992-12-02|1992-10-07|DELIVER IN PERSON|TRUCK|o beans use about the furious|
+4997|79|7|1|44|43079.08|0.02|0.05|N|O|1998-06-09|1998-06-12|1998-07-07|NONE|RAIL|r escapades ca|
+4997|17|7|2|5|4585.05|0.02|0.04|N|O|1998-05-16|1998-06-05|1998-06-07|COLLECT COD|REG AIR|cuses are furiously unusual asymptotes|
+4997|58|9|3|24|22993.20|0.04|0.06|N|O|1998-04-20|1998-04-23|1998-05-16|NONE|AIR|xpress, bo|
+4997|40|6|4|5|4700.20|0.10|0.03|N|O|1998-06-12|1998-04-24|1998-06-13|DELIVER IN PERSON|TRUCK|aggle slyly alongside of the slyly i|
+4997|22|7|5|46|42412.92|0.00|0.04|N|O|1998-04-28|1998-06-04|1998-05-08|TAKE BACK RETURN|SHIP|ecial courts are carefully|
+4997|29|2|6|2|1858.04|0.07|0.01|N|O|1998-07-09|1998-06-10|1998-07-21|TAKE BACK RETURN|REG AIR|counts. slyl|
+4998|154|2|1|12|12649.80|0.04|0.03|A|F|1992-02-20|1992-03-06|1992-03-01|TAKE BACK RETURN|RAIL| sleep slyly furiously final accounts. ins|
+4998|183|4|2|15|16247.70|0.06|0.00|R|F|1992-04-24|1992-03-21|1992-05-02|NONE|REG AIR|heodolites sleep quickly.|
+4998|59|10|3|27|25894.35|0.06|0.02|R|F|1992-03-17|1992-02-26|1992-04-05|DELIVER IN PERSON|MAIL|the blithely ironic |
+4998|63|10|4|47|45263.82|0.10|0.04|A|F|1992-02-07|1992-03-07|1992-02-19|DELIVER IN PERSON|TRUCK|mong the careful|
+4998|145|4|5|24|25083.36|0.01|0.04|R|F|1992-01-25|1992-03-16|1992-01-27|COLLECT COD|REG AIR| unwind about|
+4998|99|1|6|8|7992.72|0.03|0.07|A|F|1992-05-01|1992-03-03|1992-05-24|TAKE BACK RETURN|AIR|ions nag quickly according to the theodolit|
+4999|153|8|1|30|31594.50|0.00|0.02|A|F|1993-08-20|1993-08-15|1993-08-30|NONE|AIR|ades cajole carefully unusual ide|
+4999|10|1|2|44|40040.44|0.03|0.01|A|F|1993-08-01|1993-08-04|1993-08-17|COLLECT COD|REG AIR|ependencies. slowly regu|
+4999|86|7|3|30|29582.40|0.09|0.01|R|F|1993-07-21|1993-08-11|1993-08-20|DELIVER IN PERSON|RAIL|s cajole among the blithel|
+5024|166|3|1|17|18124.72|0.10|0.02|N|O|1996-11-24|1997-01-10|1996-12-04|NONE|AIR| to the expre|
+5024|58|6|2|41|39280.05|0.06|0.01|N|O|1996-11-09|1996-12-03|1996-12-01|COLLECT COD|REG AIR|osits hinder carefully |
+5024|112|6|3|18|18217.98|0.04|0.03|N|O|1996-12-02|1997-01-16|1996-12-05|NONE|MAIL|zle carefully sauternes. quickly|
+5024|123|8|4|42|42971.04|0.03|0.06|N|O|1996-12-02|1996-12-08|1996-12-04|DELIVER IN PERSON|RAIL|tegrate. busily spec|
+5025|30|9|1|11|10230.33|0.00|0.04|N|O|1997-02-21|1997-04-16|1997-03-14|COLLECT COD|SHIP|the carefully final esc|
+5025|78|7|2|10|9780.70|0.07|0.04|N|O|1997-06-04|1997-04-29|1997-06-28|COLLECT COD|RAIL|lly silent deposits boost busily again|
+5026|96|8|1|13|12949.17|0.02|0.04|N|O|1997-12-23|1997-11-02|1998-01-03|TAKE BACK RETURN|SHIP|endencies sleep carefully alongs|
+5027|98|2|1|6|5988.54|0.04|0.05|N|O|1997-09-28|1997-11-24|1997-10-25|NONE|FOB|ar, ironic deposi|
+5027|62|3|2|39|37520.34|0.06|0.01|N|O|1997-09-09|1997-11-13|1997-09-21|TAKE BACK RETURN|FOB|ess requests! quickly regular pac|
+5027|126|5|3|32|32835.84|0.00|0.01|N|O|1997-11-13|1997-10-29|1997-11-18|TAKE BACK RETURN|RAIL|cording to|
+5027|26|7|4|37|34262.74|0.02|0.00|N|O|1997-10-05|1997-10-30|1997-10-26|NONE|REG AIR|ost slyly fluffily|
+5027|143|4|5|3|3129.42|0.03|0.06|N|O|1997-09-30|1997-11-26|1997-10-05|DELIVER IN PERSON|AIR|t the even mu|
+5027|87|8|6|25|24677.00|0.06|0.00|N|O|1997-09-16|1997-11-25|1997-10-08|TAKE BACK RETURN|RAIL|ic ideas. requests sleep fluffily am|
+5027|81|2|7|50|49054.00|0.07|0.02|N|O|1997-09-18|1997-11-07|1997-10-05|DELIVER IN PERSON|MAIL| beans dazzle according to the fluffi|
+5028|14|1|1|15|13710.15|0.07|0.07|R|F|1992-07-17|1992-07-16|1992-08-05|COLLECT COD|REG AIR|es are quickly final pains. furiously pend|
+5028|199|10|2|15|16487.85|0.03|0.07|R|F|1992-08-02|1992-07-09|1992-08-30|NONE|REG AIR|gular, bold pinto bea|
+5029|154|5|1|17|17920.55|0.02|0.01|A|F|1993-03-12|1992-12-18|1993-04-02|DELIVER IN PERSON|FOB|! packages boost blithely. furious|
+5029|97|9|2|2|1994.18|0.00|0.04|A|F|1992-11-25|1993-01-04|1992-12-20|DELIVER IN PERSON|MAIL|packages. furiously ironi|
+5030|102|3|1|22|22046.20|0.04|0.06|N|O|1998-09-01|1998-08-15|1998-09-30|TAKE BACK RETURN|TRUCK|. quickly regular foxes believe|
+5030|80|9|2|50|49004.00|0.05|0.06|N|O|1998-08-22|1998-07-25|1998-09-18|TAKE BACK RETURN|FOB|ss excuses serve bli|
+5031|50|1|1|15|14250.75|0.02|0.05|R|F|1995-04-01|1995-02-24|1995-04-12|DELIVER IN PERSON|AIR|yly pending theodolites.|
+5031|161|6|2|40|42446.40|0.10|0.04|A|F|1994-12-04|1995-01-27|1995-01-01|NONE|TRUCK|ns hang blithely across th|
+5031|154|6|3|4|4216.60|0.01|0.07|R|F|1994-12-26|1995-02-24|1995-01-11|NONE|RAIL|after the even frays: ironic, unusual th|
+5031|181|2|4|31|33516.58|0.10|0.08|R|F|1995-01-15|1995-01-08|1995-02-09|COLLECT COD|MAIL|ts across the even requests doze furiously|
+5056|48|7|1|7|6636.28|0.09|0.01|N|O|1997-04-28|1997-04-07|1997-05-15|DELIVER IN PERSON|TRUCK|rouches after the pending instruc|
+5056|197|1|2|19|20846.61|0.04|0.00|N|O|1997-03-24|1997-05-05|1997-04-23|DELIVER IN PERSON|AIR|c theodolites. ironic a|
+5056|90|1|3|23|22772.07|0.02|0.05|N|O|1997-05-12|1997-04-28|1997-05-25|NONE|SHIP|ickly regular requests cajole. depos|
+5056|87|8|4|14|13819.12|0.08|0.00|N|O|1997-06-09|1997-04-13|1997-07-06|COLLECT COD|SHIP|sts haggle carefully along the slyl|
+5057|37|3|1|38|35607.14|0.02|0.03|N|O|1997-10-24|1997-09-07|1997-10-30|TAKE BACK RETURN|MAIL|packages. stealthily bold wa|
+5057|8|1|2|45|40860.00|0.08|0.07|N|O|1997-09-20|1997-10-02|1997-10-20|NONE|FOB| asymptotes wake slyl|
+5058|193|5|1|16|17491.04|0.09|0.07|N|O|1998-07-12|1998-06-09|1998-07-15|DELIVER IN PERSON|SHIP| the special foxes |
+5059|70|5|1|5|4850.35|0.03|0.08|R|F|1993-12-23|1994-01-12|1993-12-24|TAKE BACK RETURN|FOB|ts affix slyly accordi|
+5059|123|2|2|19|19439.28|0.06|0.04|R|F|1994-03-02|1993-12-26|1994-03-14|TAKE BACK RETURN|MAIL| special ideas poach blithely qu|
+5059|77|7|3|45|43968.15|0.02|0.00|A|F|1994-01-28|1994-01-08|1994-02-18|DELIVER IN PERSON|MAIL|enly. requests doze. express, close pa|
+5060|25|8|1|27|24975.54|0.10|0.07|R|F|1992-07-23|1992-09-05|1992-08-07|COLLECT COD|SHIP|s. ironic |
+5060|32|8|2|28|26096.84|0.04|0.04|R|F|1992-09-25|1992-08-11|1992-10-09|NONE|REG AIR|c requests|
+5060|161|2|3|15|15917.40|0.06|0.01|A|F|1992-08-28|1992-08-20|1992-09-01|DELIVER IN PERSON|AIR|ular deposits sl|
+5061|165|2|1|18|19172.88|0.03|0.00|A|F|1993-10-20|1993-10-05|1993-10-28|TAKE BACK RETURN|SHIP|atelets among the ca|
+5061|198|1|2|8|8785.52|0.01|0.02|R|F|1993-09-07|1993-10-31|1993-10-04|DELIVER IN PERSON|REG AIR|regular foxes. ir|
+5061|24|5|3|26|24024.52|0.02|0.05|A|F|1993-11-07|1993-09-13|1993-11-13|NONE|REG AIR| cajole slyly. carefully spe|
+5062|101|4|1|9|9009.90|0.08|0.00|R|F|1993-01-02|1992-12-01|1993-01-20|TAKE BACK RETURN|MAIL| silent theodolites wake. c|
+5062|75|6|2|4|3900.28|0.02|0.02|R|F|1993-02-06|1992-12-14|1993-03-03|DELIVER IN PERSON|AIR|ke furiously express theodolites. |
+5062|159|10|3|50|52957.50|0.09|0.07|A|F|1992-12-25|1992-12-13|1992-12-29|TAKE BACK RETURN|MAIL| the regular, unusual pains. specia|
+5062|161|10|4|18|19100.88|0.03|0.07|R|F|1992-11-04|1992-12-25|1992-11-05|NONE|SHIP|furiously pending requests are ruthles|
+5062|194|8|5|25|27354.75|0.08|0.02|R|F|1992-12-15|1992-11-17|1993-01-01|NONE|TRUCK|uthless excuses ag|
+5063|129|10|1|31|31902.72|0.08|0.01|N|O|1997-06-02|1997-06-20|1997-06-27|NONE|RAIL|kages. ironic, ironic courts wake. carefu|
+5063|174|2|2|43|46189.31|0.04|0.08|N|O|1997-09-14|1997-07-05|1997-10-05|TAKE BACK RETURN|TRUCK|latelets might nod blithely regular requ|
+5063|167|4|3|2|2134.32|0.02|0.03|N|O|1997-06-17|1997-07-27|1997-06-24|COLLECT COD|SHIP|kly regular i|
+5063|135|6|4|18|18632.34|0.08|0.05|N|O|1997-06-02|1997-06-18|1997-06-06|TAKE BACK RETURN|RAIL|refully quiet reques|
+5063|161|8|5|1|1061.16|0.06|0.07|N|O|1997-09-03|1997-06-26|1997-10-03|NONE|FOB|ously special |
+5088|78|6|1|23|22495.61|0.06|0.06|R|F|1993-03-03|1993-03-07|1993-03-08|NONE|FOB|cording to the fluffily expr|
+5088|51|3|2|41|38993.05|0.09|0.00|R|F|1993-01-22|1993-03-07|1993-02-09|TAKE BACK RETURN|TRUCK|ing requests. |
+5088|86|7|3|36|35498.88|0.10|0.05|A|F|1993-04-16|1993-04-03|1993-05-14|NONE|TRUCK|the furiously final deposits. furiously re|
+5088|109|6|4|10|10091.00|0.04|0.05|R|F|1993-04-07|1993-02-06|1993-04-26|NONE|FOB|beans. special requests af|
+5089|158|6|1|4|4232.60|0.05|0.06|R|F|1992-09-18|1992-09-28|1992-10-13|DELIVER IN PERSON|TRUCK|nts sleep blithely |
+5089|162|3|2|20|21243.20|0.00|0.07|R|F|1992-10-10|1992-10-07|1992-11-06|COLLECT COD|RAIL| ironic accounts|
+5089|124|7|3|46|47109.52|0.03|0.04|A|F|1992-11-09|1992-10-13|1992-11-10|TAKE BACK RETURN|RAIL|above the express accounts. exc|
+5089|34|10|4|38|35493.14|0.05|0.03|R|F|1992-11-23|1992-09-11|1992-12-22|TAKE BACK RETURN|SHIP|regular instructions are|
+5090|22|3|1|22|20284.44|0.07|0.00|N|O|1997-05-10|1997-05-25|1997-05-24|TAKE BACK RETURN|TRUCK|ets integrate ironic, regul|
+5090|129|10|2|46|47339.52|0.05|0.00|N|O|1997-04-05|1997-04-14|1997-05-01|COLLECT COD|REG AIR|lose theodolites sleep blit|
+5090|2|9|3|22|19844.00|0.09|0.05|N|O|1997-07-03|1997-04-12|1997-07-26|NONE|REG AIR|ular requests su|
+5090|114|8|4|2|2028.22|0.03|0.06|N|O|1997-04-07|1997-04-23|1997-05-01|TAKE BACK RETURN|AIR|tes. slowly iro|
+5090|48|9|5|21|19908.84|0.10|0.02|N|O|1997-03-29|1997-04-24|1997-04-25|TAKE BACK RETURN|FOB|ly express accounts. slyly even r|
+5090|80|9|6|30|29402.40|0.02|0.03|N|O|1997-05-04|1997-04-14|1997-05-30|COLLECT COD|MAIL|osits nag slyly. fluffily ex|
+5091|78|6|1|50|48903.50|0.05|0.03|N|O|1998-07-21|1998-06-22|1998-07-26|COLLECT COD|REG AIR|al dependencies. r|
+5092|164|1|1|30|31924.80|0.06|0.00|N|O|1995-12-27|1995-12-08|1996-01-09|DELIVER IN PERSON|MAIL|ss, ironic deposits. furiously stea|
+5092|45|4|2|34|32131.36|0.04|0.02|N|O|1995-12-09|1995-12-26|1995-12-21|TAKE BACK RETURN|AIR|ckages nag |
+5092|140|6|3|13|13521.82|0.06|0.01|N|O|1995-11-21|1996-01-05|1995-12-19|TAKE BACK RETURN|SHIP|es detect sly|
+5092|180|1|4|14|15122.52|0.04|0.00|N|O|1996-02-20|1995-11-30|1996-03-20|DELIVER IN PERSON|REG AIR| deposits cajole furiously against the sly|
+5092|186|7|5|42|45619.56|0.01|0.02|N|O|1995-11-06|1996-01-01|1995-12-06|DELIVER IN PERSON|AIR|s use along t|
+5092|178|6|6|11|11859.87|0.03|0.03|N|O|1995-12-02|1995-12-27|1995-12-11|COLLECT COD|MAIL|ly against the slyly silen|
+5092|159|10|7|50|52957.50|0.10|0.03|N|O|1995-11-30|1996-01-14|1995-12-19|NONE|REG AIR|r platelets maintain car|
+5093|168|9|1|40|42726.40|0.05|0.01|R|F|1993-09-16|1993-11-04|1993-10-05|TAKE BACK RETURN|REG AIR|ing pinto beans. quickly bold dependenci|
+5093|74|2|2|15|14611.05|0.01|0.04|A|F|1993-12-02|1993-11-18|1994-01-01|DELIVER IN PERSON|FOB|ly among the unusual foxe|
+5093|151|9|3|31|32585.65|0.00|0.02|R|F|1993-09-22|1993-11-14|1993-09-26|TAKE BACK RETURN|REG AIR| against the|
+5093|156|1|4|37|39077.55|0.04|0.00|A|F|1993-10-26|1993-12-02|1993-10-27|NONE|TRUCK|courts. qui|
+5093|115|2|5|30|30453.30|0.06|0.05|A|F|1993-11-22|1993-11-27|1993-12-14|DELIVER IN PERSON|TRUCK|ithely ironic sheaves use fluff|
+5093|121|6|6|31|31654.72|0.01|0.08|A|F|1993-12-17|1993-11-14|1994-01-02|NONE|SHIP|he final foxes. fluffily ironic |
+5094|143|10|1|19|19819.66|0.03|0.03|R|F|1993-03-31|1993-06-12|1993-04-04|NONE|AIR|ronic foxes. furi|
+5094|108|5|2|23|23186.30|0.05|0.07|R|F|1993-06-13|1993-05-19|1993-07-06|NONE|MAIL|st furiously above the fluffily care|
+5094|92|6|3|11|10912.99|0.04|0.08|A|F|1993-06-25|1993-06-24|1993-07-18|TAKE BACK RETURN|MAIL|s cajole quickly against the furiously ex|
+5094|79|10|4|21|20560.47|0.09|0.08|R|F|1993-07-26|1993-05-03|1993-08-16|NONE|MAIL| blithely furiously final re|
+5095|65|10|1|46|44392.76|0.07|0.01|A|F|1992-06-26|1992-06-25|1992-07-05|COLLECT COD|RAIL|egular instruction|
+5095|106|3|2|2|2012.20|0.07|0.08|A|F|1992-07-09|1992-05-25|1992-07-21|DELIVER IN PERSON|REG AIR|detect car|
+5095|123|8|3|28|28647.36|0.01|0.04|A|F|1992-06-20|1992-06-27|1992-06-22|DELIVER IN PERSON|AIR| into the final courts. ca|
+5095|178|7|4|42|45283.14|0.08|0.08|R|F|1992-05-23|1992-06-01|1992-06-18|COLLECT COD|TRUCK|ccounts. packages could have t|
+5095|166|7|5|9|9595.44|0.10|0.07|R|F|1992-08-14|1992-06-23|1992-08-16|TAKE BACK RETURN|REG AIR|bold theodolites wake about the expr|
+5095|97|8|6|15|14956.35|0.01|0.06|A|F|1992-07-11|1992-07-12|1992-08-09|COLLECT COD|AIR| to the packages wake sly|
+5095|169|10|7|40|42766.40|0.05|0.02|A|F|1992-07-11|1992-06-07|1992-07-26|DELIVER IN PERSON|MAIL|carefully unusual plat|
+5120|133|4|1|28|28927.64|0.06|0.03|N|O|1996-07-20|1996-08-31|1996-08-06|NONE|RAIL| across the silent requests. caref|
+5121|184|5|1|23|24936.14|0.06|0.01|A|F|1992-05-18|1992-06-20|1992-06-02|TAKE BACK RETURN|REG AIR|even courts are blithely ironically |
+5121|111|1|2|45|45499.95|0.08|0.04|A|F|1992-08-13|1992-07-27|1992-09-12|NONE|TRUCK|pecial accounts cajole ca|
+5121|97|10|3|27|26921.43|0.08|0.07|R|F|1992-06-17|1992-06-11|1992-06-19|NONE|MAIL|ly silent theodolit|
+5121|68|7|4|10|9680.60|0.04|0.05|R|F|1992-06-08|1992-07-10|1992-07-02|TAKE BACK RETURN|FOB|e quickly according |
+5121|89|10|5|46|45497.68|0.03|0.02|R|F|1992-05-27|1992-07-19|1992-05-28|TAKE BACK RETURN|FOB|use express foxes. slyly |
+5121|1|8|6|2|1802.00|0.04|0.07|R|F|1992-08-10|1992-06-28|1992-08-11|NONE|FOB| final, regular account|
+5122|183|4|1|28|30329.04|0.03|0.00|N|O|1996-04-20|1996-03-29|1996-04-29|DELIVER IN PERSON|FOB|g the busily ironic accounts boos|
+5122|82|3|2|43|42229.44|0.09|0.03|N|O|1996-05-31|1996-04-12|1996-06-13|NONE|MAIL|ut the carefully special foxes. idle,|
+5122|45|6|3|12|11340.48|0.07|0.03|N|O|1996-04-02|1996-04-27|1996-04-10|DELIVER IN PERSON|AIR|lar instructions |
+5123|26|7|1|13|12038.26|0.08|0.07|N|O|1998-05-17|1998-03-23|1998-06-02|COLLECT COD|MAIL|regular pearls|
+5124|55|7|1|43|41067.15|0.00|0.02|N|O|1997-07-10|1997-05-13|1997-07-31|COLLECT COD|AIR|onic package|
+5124|6|3|2|41|37146.00|0.02|0.06|N|O|1997-07-05|1997-06-29|1997-07-23|DELIVER IN PERSON|RAIL|wake across the|
+5124|125|6|3|44|45105.28|0.03|0.03|N|O|1997-07-13|1997-06-26|1997-08-01|DELIVER IN PERSON|RAIL|equests. carefully unusual d|
+5124|70|9|4|36|34922.52|0.10|0.07|N|O|1997-04-20|1997-07-03|1997-05-04|TAKE BACK RETURN|AIR|r deposits ab|
+5125|6|9|1|38|34428.00|0.09|0.05|N|O|1998-03-20|1998-04-14|1998-03-22|COLLECT COD|MAIL|ily even deposits w|
+5125|160|1|2|5|5300.80|0.08|0.06|N|O|1998-04-07|1998-04-14|1998-04-29|COLLECT COD|RAIL| thinly even pack|
+5126|24|3|1|33|30492.66|0.02|0.02|R|F|1993-02-04|1992-12-23|1993-02-14|NONE|RAIL|ipliers promise furiously whithout the |
+5126|101|6|2|43|43047.30|0.09|0.04|R|F|1993-01-07|1992-12-19|1993-01-16|COLLECT COD|MAIL|e silently. ironic, unusual accounts|
+5126|78|8|3|23|22495.61|0.08|0.01|R|F|1993-01-02|1993-01-02|1993-01-05|COLLECT COD|TRUCK|egular, blithe packages.|
+5127|19|3|1|33|30327.33|0.08|0.04|N|O|1997-03-25|1997-03-02|1997-04-04|NONE|SHIP| bold deposits use carefully a|
+5127|32|8|2|20|18640.60|0.01|0.03|N|O|1997-05-11|1997-02-26|1997-05-12|TAKE BACK RETURN|SHIP|dolites about the final platelets w|
+5152|105|2|1|9|9045.90|0.04|0.03|N|O|1997-04-11|1997-02-11|1997-04-18|COLLECT COD|AIR| cajole furiously alongside of the bo|
+5152|134|10|2|50|51706.50|0.04|0.04|N|O|1997-03-10|1997-02-04|1997-03-15|COLLECT COD|FOB| the final deposits. slyly ironic warth|
+5153|35|1|1|42|39271.26|0.03|0.01|N|O|1995-10-03|1995-11-09|1995-10-11|COLLECT COD|RAIL|re thinly. ironic|
+5153|53|5|2|14|13342.70|0.05|0.05|N|O|1995-11-29|1995-10-21|1995-12-08|TAKE BACK RETURN|TRUCK| slyly daring pinto beans lose blithely fi|
+5153|68|7|3|30|29041.80|0.09|0.01|N|O|1995-11-10|1995-11-14|1995-11-16|DELIVER IN PERSON|AIR|beans sleep bl|
+5153|173|2|4|32|34341.44|0.10|0.08|N|O|1995-12-05|1995-09-25|1996-01-03|DELIVER IN PERSON|MAIL|egular deposits. ironi|
+5153|112|2|5|36|36435.96|0.01|0.03|N|O|1995-12-15|1995-11-08|1995-12-30|COLLECT COD|TRUCK| ironic instru|
+5153|136|2|6|42|43517.46|0.00|0.03|N|O|1995-10-19|1995-11-23|1995-11-06|TAKE BACK RETURN|RAIL|ickly even deposi|
+5154|190|1|1|11|11992.09|0.02|0.05|N|O|1997-08-06|1997-06-30|1997-09-04|NONE|RAIL|luffily bold foxes. final|
+5154|144|5|2|15|15662.10|0.07|0.08|N|O|1997-06-23|1997-07-11|1997-07-11|NONE|AIR|even packages. packages use|
+5155|48|9|1|1|948.04|0.00|0.00|A|F|1994-07-03|1994-08-11|1994-07-29|COLLECT COD|FOB|oze slyly after the silent, regular idea|
+5155|188|9|2|5|5440.90|0.08|0.02|A|F|1994-06-30|1994-08-13|1994-07-15|TAKE BACK RETURN|AIR|ole blithely slyly ironic |
+5155|106|3|3|28|28170.80|0.05|0.02|R|F|1994-07-01|1994-07-19|1994-07-18|COLLECT COD|REG AIR|s cajole. accounts wake. thinly quiet pla|
+5155|79|7|4|39|38183.73|0.09|0.06|A|F|1994-08-25|1994-09-01|1994-09-18|COLLECT COD|TRUCK|l dolphins nag caref|
+5156|117|4|1|21|21359.31|0.06|0.03|N|O|1997-01-01|1997-01-30|1997-01-11|TAKE BACK RETURN|TRUCK|ts detect against the furiously reg|
+5156|148|1|2|36|37733.04|0.04|0.07|N|O|1997-02-12|1996-12-10|1997-03-13|TAKE BACK RETURN|REG AIR| slyly even orbi|
+5157|55|7|1|35|33426.75|0.06|0.08|N|O|1997-07-28|1997-09-30|1997-08-15|TAKE BACK RETURN|REG AIR|to the furiously sil|
+5157|138|9|2|18|18686.34|0.10|0.04|N|O|1997-09-06|1997-10-03|1997-09-19|COLLECT COD|MAIL|y bold deposits nag blithely. final reque|
+5157|167|8|3|15|16007.40|0.09|0.00|N|O|1997-07-27|1997-08-30|1997-08-08|DELIVER IN PERSON|RAIL|cajole. spec|
+5157|59|7|4|25|23976.25|0.00|0.03|N|O|1997-08-24|1997-09-23|1997-08-28|COLLECT COD|REG AIR| packages detect. even requests against th|
+5157|149|8|5|40|41965.60|0.09|0.06|N|O|1997-08-11|1997-08-28|1997-09-01|TAKE BACK RETURN|FOB|ial packages according to |
+5157|150|9|6|26|27303.90|0.10|0.01|N|O|1997-07-28|1997-08-22|1997-08-22|NONE|FOB|nto beans cajole car|
+5157|49|8|7|12|11388.48|0.10|0.08|N|O|1997-10-19|1997-08-07|1997-10-26|NONE|FOB|es. busily |
+5158|45|4|1|43|40636.72|0.10|0.04|N|O|1997-04-10|1997-03-06|1997-04-15|DELIVER IN PERSON|AIR|nusual platelets. slyly even foxes cajole |
+5158|85|6|2|18|17731.44|0.04|0.04|N|O|1997-04-30|1997-03-28|1997-05-12|COLLECT COD|REG AIR|hely regular pa|
+5158|142|9|3|41|42727.74|0.05|0.05|N|O|1997-02-25|1997-03-19|1997-03-03|COLLECT COD|AIR|deposits. quickly special |
+5158|131|7|4|49|50525.37|0.05|0.01|N|O|1997-04-10|1997-03-21|1997-04-30|NONE|REG AIR|r requests sleep q|
+5158|119|9|5|20|20382.20|0.01|0.04|N|O|1997-02-03|1997-02-20|1997-02-08|TAKE BACK RETURN|AIR|latelets use accordin|
+5158|88|9|6|39|38535.12|0.08|0.04|N|O|1997-05-15|1997-04-04|1997-06-02|DELIVER IN PERSON|FOB|lithely fina|
+5158|91|5|7|38|37661.42|0.10|0.05|N|O|1997-05-09|1997-03-03|1997-06-04|NONE|SHIP|uffily regular ac|
+5159|124|7|1|39|39940.68|0.06|0.07|N|O|1996-12-17|1996-12-08|1997-01-10|COLLECT COD|MAIL|re furiously after the pending dolphin|
+5159|17|1|2|46|42182.46|0.01|0.01|N|O|1996-12-15|1996-12-07|1996-12-30|DELIVER IN PERSON|SHIP|s kindle slyly carefully regular|
+5159|152|4|3|22|23147.30|0.01|0.02|N|O|1996-11-06|1996-11-04|1996-11-15|TAKE BACK RETURN|SHIP|he furiously sile|
+5159|52|3|4|5|4760.25|0.10|0.00|N|O|1996-11-25|1996-12-19|1996-12-25|TAKE BACK RETURN|FOB|nal deposits. pending, ironic ideas grow|
+5159|198|10|5|36|39534.84|0.06|0.01|N|O|1997-01-24|1996-11-07|1997-02-08|NONE|REG AIR|packages wake.|
+5184|153|8|1|33|34753.95|0.07|0.04|N|O|1998-08-17|1998-10-16|1998-08-24|TAKE BACK RETURN|AIR|posits. carefully express asympto|
+5184|16|6|2|47|43052.47|0.05|0.01|N|O|1998-11-02|1998-08-19|1998-11-07|COLLECT COD|TRUCK|se. carefully express pinto beans x|
+5184|88|9|3|39|38535.12|0.03|0.06|N|O|1998-10-27|1998-10-17|1998-11-19|DELIVER IN PERSON|FOB|es above the care|
+5184|176|7|4|26|27980.42|0.05|0.08|N|O|1998-11-11|1998-08-26|1998-12-01|TAKE BACK RETURN|TRUCK| packages are|
+5184|124|9|5|19|19458.28|0.06|0.03|N|O|1998-11-15|1998-10-12|1998-11-21|COLLECT COD|REG AIR|refully express platelets sleep carefull|
+5184|80|9|6|49|48023.92|0.02|0.00|N|O|1998-09-18|1998-08-28|1998-10-14|COLLECT COD|FOB|thlessly closely even reque|
+5185|197|1|1|37|40596.03|0.00|0.04|N|O|1997-08-08|1997-09-08|1997-08-14|COLLECT COD|SHIP|gainst the courts dazzle care|
+5185|25|8|2|32|29600.64|0.06|0.00|N|O|1997-08-17|1997-09-30|1997-08-24|TAKE BACK RETURN|REG AIR|ackages. slyly even requests|
+5185|196|9|3|41|44943.79|0.00|0.05|N|O|1997-10-15|1997-10-11|1997-11-02|COLLECT COD|REG AIR|ly blithe deposits. furi|
+5185|96|7|4|30|29882.70|0.09|0.04|N|O|1997-10-17|1997-09-16|1997-10-23|TAKE BACK RETURN|SHIP|ress packages are furiously|
+5185|128|9|5|8|8224.96|0.04|0.00|N|O|1997-08-30|1997-09-02|1997-09-22|COLLECT COD|REG AIR|sts around the slyly perma|
+5185|146|9|6|50|52307.00|0.03|0.04|N|O|1997-10-15|1997-10-19|1997-11-06|TAKE BACK RETURN|FOB|final platelets. ideas sleep careful|
+5186|55|10|1|38|36291.90|0.06|0.02|N|O|1996-11-23|1996-09-21|1996-12-11|DELIVER IN PERSON|MAIL|y ruthless foxes. fluffily |
+5186|91|2|2|31|30723.79|0.09|0.03|N|O|1996-10-19|1996-09-26|1996-10-25|TAKE BACK RETURN|REG AIR| accounts use furiously slyly spe|
+5186|89|10|3|26|25716.08|0.03|0.02|N|O|1996-08-08|1996-10-05|1996-08-21|DELIVER IN PERSON|FOB|capades. accounts sublate. pinto|
+5186|90|1|4|8|7920.72|0.10|0.05|N|O|1996-09-23|1996-09-29|1996-09-30|COLLECT COD|RAIL|y regular notornis k|
+5186|18|2|5|28|25704.28|0.09|0.03|N|O|1996-10-05|1996-10-27|1996-10-19|TAKE BACK RETURN|RAIL|al decoys. blit|
+5186|82|3|6|35|34372.80|0.00|0.05|N|O|1996-10-20|1996-10-12|1996-11-12|TAKE BACK RETURN|RAIL|sly silent pack|
+5186|198|10|7|44|48320.36|0.00|0.08|N|O|1996-09-23|1996-10-14|1996-10-01|NONE|TRUCK|old, final accounts cajole sl|
+5187|11|1|1|49|44639.49|0.04|0.06|N|O|1997-10-20|1997-10-12|1997-10-26|DELIVER IN PERSON|AIR|l, regular platelets instead of the foxes w|
+5187|83|4|2|1|983.08|0.10|0.08|N|O|1997-08-08|1997-08-24|1997-08-22|DELIVER IN PERSON|REG AIR|aggle never bold |
+5188|118|2|1|18|18325.98|0.04|0.03|N|O|1995-06-19|1995-05-19|1995-06-25|DELIVER IN PERSON|AIR|p according to the sometimes regu|
+5188|194|8|2|36|39390.84|0.04|0.02|A|F|1995-03-09|1995-05-16|1995-03-19|NONE|TRUCK|packages? blithely s|
+5188|148|1|3|9|9433.26|0.06|0.08|A|F|1995-05-09|1995-05-22|1995-05-19|TAKE BACK RETURN|REG AIR|r attainments are across the |
+5189|138|9|1|44|45677.72|0.02|0.06|A|F|1994-01-13|1994-02-07|1994-01-21|DELIVER IN PERSON|MAIL|y finally pendin|
+5189|16|3|2|38|34808.38|0.06|0.00|A|F|1994-03-26|1994-01-28|1994-04-20|DELIVER IN PERSON|REG AIR|ideas. idle, final deposits de|
+5189|110|5|3|4|4040.44|0.09|0.02|A|F|1993-12-21|1994-02-23|1994-01-09|DELIVER IN PERSON|REG AIR|. blithely exp|
+5189|94|7|4|49|48710.41|0.05|0.01|R|F|1994-01-22|1994-01-19|1994-02-04|TAKE BACK RETURN|SHIP| requests |
+5189|123|2|5|14|14323.68|0.02|0.03|A|F|1994-01-23|1994-01-05|1994-02-12|DELIVER IN PERSON|REG AIR|unusual packag|
+5189|17|8|6|41|37597.41|0.02|0.06|R|F|1993-12-12|1994-02-05|1994-01-09|DELIVER IN PERSON|RAIL|ial theodolites cajole slyly. slyly unus|
+5190|56|1|1|43|41110.15|0.09|0.06|A|F|1992-08-19|1992-06-10|1992-09-01|DELIVER IN PERSON|FOB|encies use fluffily unusual requests? hoc|
+5190|132|3|2|6|6192.78|0.10|0.08|A|F|1992-08-08|1992-07-14|1992-08-22|COLLECT COD|RAIL|furiously regular pinto beans. furiously i|
+5190|89|10|3|45|44508.60|0.04|0.03|A|F|1992-07-23|1992-06-16|1992-08-04|NONE|FOB|y carefully final ideas. f|
+5191|115|6|1|41|41619.51|0.00|0.08|A|F|1995-02-05|1995-02-27|1995-02-15|DELIVER IN PERSON|AIR|uests! ironic theodolites cajole care|
+5191|168|7|2|40|42726.40|0.02|0.01|A|F|1995-03-31|1995-02-21|1995-04-02|NONE|AIR|nes haggle sometimes. requests eng|
+5191|43|4|3|27|25462.08|0.07|0.05|A|F|1994-12-26|1995-01-24|1995-01-14|DELIVER IN PERSON|RAIL|tructions nag bravely within the re|
+5191|183|4|4|7|7582.26|0.01|0.04|A|F|1995-03-24|1995-01-30|1995-03-30|NONE|RAIL|eposits. express|
+5216|69|10|1|17|16474.02|0.04|0.06|N|O|1997-08-20|1997-11-07|1997-09-14|COLLECT COD|FOB|s according to the accounts bo|
+5217|80|1|1|50|49004.00|0.05|0.02|N|O|1995-12-26|1995-11-21|1996-01-24|DELIVER IN PERSON|MAIL|s. express, express accounts c|
+5217|16|7|2|23|21068.23|0.06|0.07|N|O|1996-01-18|1995-12-24|1996-02-10|COLLECT COD|RAIL|ven ideas. requests amo|
+5217|102|7|3|23|23048.30|0.03|0.02|N|O|1995-11-15|1995-12-17|1995-11-27|DELIVER IN PERSON|FOB|pending packages cajole ne|
+5217|81|2|4|47|46110.76|0.04|0.00|N|O|1995-11-24|1995-12-25|1995-11-25|COLLECT COD|AIR|ronic packages i|
+5218|83|4|1|43|42272.44|0.05|0.04|A|F|1992-08-04|1992-09-12|1992-08-17|DELIVER IN PERSON|SHIP|k theodolites. express, even id|
+5218|125|4|2|33|33828.96|0.06|0.01|R|F|1992-09-16|1992-09-30|1992-09-27|NONE|TRUCK|ronic instructi|
+5219|135|6|1|2|2070.26|0.08|0.00|N|O|1997-06-26|1997-04-29|1997-07-08|TAKE BACK RETURN|FOB| blithely according to the stea|
+5219|119|9|2|20|20382.20|0.05|0.00|N|O|1997-04-20|1997-05-26|1997-05-13|COLLECT COD|FOB|e along the ironic,|
+5220|83|4|1|27|26543.16|0.10|0.04|R|F|1992-09-21|1992-08-29|1992-10-16|DELIVER IN PERSON|RAIL|s cajole blithely furiously iron|
+5221|104|9|1|24|24098.40|0.07|0.03|N|O|1995-10-04|1995-08-11|1995-10-30|COLLECT COD|REG AIR|s pinto beans sleep. sly|
+5221|9|10|2|34|30906.00|0.01|0.05|N|O|1995-09-11|1995-07-17|1995-10-10|COLLECT COD|SHIP|eans. furio|
+5221|180|10|3|16|17282.88|0.04|0.01|N|O|1995-08-29|1995-09-06|1995-09-12|TAKE BACK RETURN|TRUCK|ending request|
+5222|151|3|1|1|1051.15|0.00|0.00|A|F|1994-08-19|1994-07-16|1994-09-08|TAKE BACK RETURN|FOB|idle requests. carefully pending pinto bean|
+5223|45|4|1|24|22680.96|0.00|0.00|A|F|1994-10-03|1994-09-20|1994-10-11|TAKE BACK RETURN|TRUCK|refully bold courts besides the regular,|
+5223|124|9|2|25|25603.00|0.09|0.02|R|F|1994-07-12|1994-08-13|1994-08-01|NONE|FOB|y express ideas impress|
+5223|6|3|3|19|17214.00|0.04|0.01|R|F|1994-10-28|1994-08-26|1994-10-31|COLLECT COD|REG AIR|ntly. furiously even excuses a|
+5223|130|9|4|40|41205.20|0.01|0.04|R|F|1994-10-01|1994-09-18|1994-10-28|COLLECT COD|SHIP|kly pending |
+5248|81|2|1|39|38262.12|0.05|0.03|N|O|1995-08-10|1995-07-04|1995-09-09|TAKE BACK RETURN|MAIL|yly even accounts. spe|
+5248|138|9|2|45|46715.85|0.00|0.06|A|F|1995-05-09|1995-07-12|1995-05-27|DELIVER IN PERSON|FOB|. bold, pending foxes h|
+5249|50|9|1|31|29451.55|0.07|0.03|A|F|1994-11-21|1994-11-19|1994-12-08|NONE|REG AIR|f the excuses. furiously fin|
+5249|31|7|2|44|40965.32|0.05|0.00|A|F|1994-12-28|1994-11-29|1994-12-29|TAKE BACK RETURN|MAIL|ole furiousl|
+5249|32|8|3|13|12116.39|0.09|0.00|R|F|1994-09-27|1994-10-20|1994-10-05|DELIVER IN PERSON|SHIP|ites. finally exp|
+5249|146|3|4|29|30338.06|0.00|0.05|A|F|1994-09-16|1994-11-03|1994-10-06|NONE|TRUCK| players. f|
+5249|158|6|5|12|12697.80|0.01|0.08|R|F|1994-12-28|1994-11-07|1995-01-15|COLLECT COD|MAIL|press depths could have to sleep carefu|
+5250|44|3|1|2|1888.08|0.08|0.04|N|O|1995-08-09|1995-10-10|1995-08-13|COLLECT COD|AIR|its. final pinto|
+5250|192|6|2|27|29489.13|0.10|0.05|N|O|1995-10-24|1995-09-03|1995-11-18|COLLECT COD|TRUCK|l forges are. furiously unusual pin|
+5251|139|10|1|36|37408.68|0.10|0.01|N|O|1995-07-16|1995-07-05|1995-07-28|DELIVER IN PERSON|FOB|slowly! bli|
+5252|141|10|1|13|13534.82|0.02|0.01|N|O|1996-03-02|1996-05-10|1996-03-11|NONE|FOB|boost fluffily across |
+5252|139|5|2|39|40526.07|0.06|0.05|N|O|1996-05-17|1996-04-23|1996-05-23|COLLECT COD|AIR|gular requests.|
+5252|195|9|3|9|9856.71|0.09|0.03|N|O|1996-05-30|1996-05-03|1996-06-26|TAKE BACK RETURN|RAIL|x. slyly special depos|
+5252|87|8|4|48|47379.84|0.01|0.06|N|O|1996-04-17|1996-03-19|1996-05-03|COLLECT COD|AIR|bold requests. furious|
+5252|68|5|5|24|23233.44|0.04|0.05|N|O|1996-05-11|1996-04-17|1996-05-12|COLLECT COD|REG AIR|posits after the fluffi|
+5252|3|10|6|41|37023.00|0.02|0.03|N|O|1996-03-16|1996-04-18|1996-03-17|NONE|TRUCK|ording to the blithely express somas sho|
+5253|31|2|1|35|32586.05|0.02|0.00|N|O|1995-07-23|1995-06-12|1995-08-03|DELIVER IN PERSON|AIR|ven deposits. careful|
+5253|150|7|2|38|39905.70|0.02|0.06|N|O|1995-08-03|1995-06-14|1995-08-27|DELIVER IN PERSON|REG AIR|onic dependencies are furiou|
+5253|14|5|3|9|8226.09|0.03|0.08|N|F|1995-06-08|1995-05-12|1995-06-23|DELIVER IN PERSON|REG AIR|lyly express deposits use furiou|
+5253|166|1|4|25|26654.00|0.04|0.03|A|F|1995-05-21|1995-06-13|1995-06-09|COLLECT COD|TRUCK|urts. even theodoli|
+5254|111|2|1|35|35388.85|0.01|0.07|A|F|1992-07-28|1992-09-05|1992-08-07|COLLECT COD|REG AIR|ntegrate carefully among the pending|
+5254|135|6|2|10|10351.30|0.05|0.04|A|F|1992-11-19|1992-10-20|1992-12-15|COLLECT COD|SHIP| accounts. silent deposit|
+5254|192|5|3|32|34950.08|0.00|0.08|A|F|1992-08-10|1992-09-21|1992-08-16|NONE|RAIL|ts impress closely furi|
+5254|163|2|4|45|47842.20|0.05|0.06|A|F|1992-11-11|1992-09-01|1992-12-07|COLLECT COD|REG AIR| wake. blithely silent excuse|
+5254|29|8|5|23|21367.46|0.02|0.06|A|F|1992-08-16|1992-09-05|1992-09-15|COLLECT COD|RAIL|lyly regular accounts. furiously pendin|
+5254|158|3|6|34|35977.10|0.09|0.02|R|F|1992-08-29|1992-10-16|1992-09-20|TAKE BACK RETURN|RAIL| furiously above the furiously |
+5254|20|7|7|9|8280.18|0.09|0.03|R|F|1992-07-29|1992-10-15|1992-08-20|TAKE BACK RETURN|REG AIR| wake blithely fluff|
+5255|131|7|1|2|2062.26|0.04|0.08|N|O|1996-09-27|1996-10-04|1996-10-04|DELIVER IN PERSON|RAIL|ajole blithely fluf|
+5255|172|10|2|30|32165.10|0.04|0.08|N|O|1996-09-20|1996-08-18|1996-10-09|TAKE BACK RETURN|AIR| to the silent requests cajole b|
+5255|130|3|3|41|42235.33|0.09|0.03|N|O|1996-08-21|1996-09-24|1996-09-05|COLLECT COD|FOB|tect blithely against t|
+5280|97|9|1|16|15953.44|0.02|0.03|N|O|1998-03-29|1998-01-28|1998-04-03|TAKE BACK RETURN|SHIP| foxes are furiously. theodoli|
+5280|176|5|2|46|49503.82|0.01|0.06|N|O|1998-01-04|1998-01-21|1998-02-03|TAKE BACK RETURN|FOB|efully carefully pen|
+5281|114|1|1|37|37522.07|0.05|0.02|N|O|1995-11-10|1996-01-31|1995-11-22|DELIVER IN PERSON|MAIL|ronic dependencies. fluffily final p|
+5281|105|2|2|38|38193.80|0.00|0.05|N|O|1996-02-17|1995-12-19|1996-02-29|NONE|RAIL|n asymptotes could wake about th|
+5281|127|2|3|23|23623.76|0.08|0.00|N|O|1995-12-30|1996-01-26|1996-01-23|COLLECT COD|REG AIR|. final theodolites cajole. ironic p|
+5281|87|8|4|48|47379.84|0.03|0.05|N|O|1996-01-31|1995-12-23|1996-02-08|TAKE BACK RETURN|REG AIR|ss the furiously |
+5281|43|10|5|33|31120.32|0.01|0.07|N|O|1996-03-01|1995-12-28|1996-03-05|COLLECT COD|RAIL|ly brave foxes. bold deposits above the |
+5282|118|2|1|36|36651.96|0.05|0.02|N|O|1998-05-20|1998-04-10|1998-06-14|DELIVER IN PERSON|TRUCK|re slyly accor|
+5282|52|10|2|32|30465.60|0.02|0.05|N|O|1998-03-01|1998-03-31|1998-03-03|NONE|FOB|onic deposits; furiou|
+5282|58|10|3|28|26825.40|0.03|0.06|N|O|1998-05-06|1998-04-24|1998-05-30|COLLECT COD|SHIP|fily final instruc|
+5283|5|2|1|20|18100.00|0.05|0.02|A|F|1994-09-16|1994-08-03|1994-10-15|TAKE BACK RETURN|TRUCK|al deposits? blithely even pinto beans|
+5283|186|7|2|1|1086.18|0.10|0.08|R|F|1994-06-20|1994-08-03|1994-07-01|COLLECT COD|FOB|deposits within the furio|
+5284|173|1|1|16|17170.72|0.04|0.02|N|O|1995-08-17|1995-08-23|1995-08-26|DELIVER IN PERSON|TRUCK|unts detect furiously even d|
+5284|44|7|2|24|22656.96|0.03|0.08|N|O|1995-10-21|1995-08-23|1995-10-27|COLLECT COD|AIR| haggle according |
+5285|193|5|1|31|33888.89|0.08|0.00|A|F|1994-04-17|1994-04-05|1994-05-09|NONE|RAIL|ubt. quickly blithe |
+5285|31|2|2|37|34448.11|0.09|0.02|R|F|1994-02-26|1994-02-18|1994-03-27|NONE|SHIP|uffily regu|
+5285|34|10|3|24|22416.72|0.02|0.04|A|F|1994-04-19|1994-04-03|1994-04-25|DELIVER IN PERSON|FOB|ess packages. quick, even deposits snooze b|
+5285|43|2|4|12|11316.48|0.05|0.06|A|F|1994-04-22|1994-04-07|1994-05-19|NONE|AIR| deposits-- quickly bold requests hag|
+5285|71|2|5|1|971.07|0.03|0.05|R|F|1994-03-14|1994-02-20|1994-04-10|COLLECT COD|TRUCK|e fluffily about the slyly special pa|
+5285|146|7|6|1|1046.14|0.06|0.01|R|F|1994-02-08|1994-04-02|1994-02-17|COLLECT COD|SHIP|ing deposits integra|
+5286|199|1|1|1|1099.19|0.01|0.07|N|O|1997-11-25|1997-11-07|1997-12-17|COLLECT COD|REG AIR|ly! furiously final pack|
+5286|97|1|2|7|6979.63|0.06|0.05|N|O|1997-10-23|1997-12-10|1997-11-20|TAKE BACK RETURN|RAIL|y express instructions sleep carefull|
+5286|16|10|3|3|2748.03|0.06|0.08|N|O|1997-12-04|1997-11-06|1997-12-09|COLLECT COD|MAIL|re fluffily|
+5286|40|6|4|6|5640.24|0.04|0.03|N|O|1997-10-15|1997-12-05|1997-11-12|COLLECT COD|RAIL|y special a|
+5286|186|7|5|38|41274.84|0.07|0.05|N|O|1997-11-29|1997-11-26|1997-12-15|TAKE BACK RETURN|SHIP|fluffily. special, ironic deposit|
+5286|138|9|6|24|24915.12|0.08|0.00|N|O|1997-09-27|1997-12-21|1997-09-30|COLLECT COD|TRUCK|s. express foxes of the|
+5287|39|10|1|32|30048.96|0.01|0.01|A|F|1994-01-29|1994-01-27|1994-02-08|NONE|RAIL|heodolites haggle caref|
+5312|61|6|1|27|25948.62|0.04|0.08|A|F|1995-04-20|1995-04-09|1995-04-25|COLLECT COD|SHIP|tructions cajol|
+5312|2|5|2|43|38786.00|0.05|0.08|A|F|1995-03-24|1995-05-07|1995-03-28|NONE|TRUCK|ly unusual|
+5313|17|1|1|34|31178.34|0.10|0.02|N|O|1997-08-07|1997-08-12|1997-08-24|DELIVER IN PERSON|FOB|ccording to the blithely final account|
+5313|13|10|2|17|15521.17|0.00|0.02|N|O|1997-09-02|1997-08-20|1997-09-07|NONE|SHIP|uests wake|
+5313|112|9|3|47|47569.17|0.06|0.08|N|O|1997-08-12|1997-08-18|1997-08-13|TAKE BACK RETURN|RAIL|pinto beans across the |
+5313|197|1|4|16|17555.04|0.08|0.00|N|O|1997-10-04|1997-08-02|1997-10-25|COLLECT COD|REG AIR|ckages wake carefully aga|
+5313|72|1|5|30|29162.10|0.06|0.08|N|O|1997-06-27|1997-07-18|1997-06-30|NONE|SHIP|nding packages use|
+5313|120|7|6|21|21422.52|0.05|0.05|N|O|1997-09-26|1997-09-02|1997-10-18|COLLECT COD|FOB|he blithely regular packages. quickly|
+5314|118|9|1|10|10181.10|0.07|0.05|N|O|1995-09-26|1995-07-24|1995-10-19|DELIVER IN PERSON|RAIL|latelets haggle final|
+5314|125|6|2|16|16401.92|0.00|0.04|N|O|1995-09-25|1995-07-08|1995-10-17|COLLECT COD|SHIP|hely unusual packages acc|
+5315|35|1|1|12|11220.36|0.08|0.06|R|F|1992-12-18|1993-01-16|1993-01-10|NONE|AIR|ccounts. furiously ironi|
+5315|179|10|2|39|42087.63|0.00|0.06|R|F|1992-11-09|1992-12-29|1992-12-07|NONE|SHIP|ly alongside of the ca|
+5316|108|1|1|29|29234.90|0.10|0.05|R|F|1994-03-28|1994-04-29|1994-04-09|DELIVER IN PERSON|REG AIR|ckly unusual foxes bo|
+5316|136|7|2|31|32120.03|0.00|0.08|A|F|1994-04-01|1994-04-21|1994-04-12|DELIVER IN PERSON|MAIL|s. deposits cajole around t|
+5317|82|3|1|29|28480.32|0.02|0.06|A|F|1994-11-28|1994-11-27|1994-12-16|COLLECT COD|FOB|oss the carefull|
+5317|171|2|2|18|19281.06|0.06|0.06|A|F|1995-01-02|1994-10-29|1995-01-16|NONE|RAIL|g to the blithely p|
+5317|120|4|3|37|37744.44|0.09|0.00|R|F|1994-09-15|1994-10-24|1994-09-23|NONE|TRUCK|totes nag theodolites. pend|
+5317|67|6|4|50|48353.00|0.09|0.01|A|F|1994-10-17|1994-10-25|1994-11-03|NONE|REG AIR|cajole furiously. accounts use quick|
+5317|95|8|5|19|18906.71|0.07|0.07|R|F|1994-12-15|1994-10-18|1994-12-27|NONE|MAIL|onic requests boost bli|
+5317|115|9|6|48|48725.28|0.01|0.03|A|F|1994-09-19|1994-11-25|1994-10-03|COLLECT COD|MAIL|ts about the packages cajole furio|
+5317|169|4|7|30|32074.80|0.07|0.07|A|F|1994-10-13|1994-10-31|1994-10-28|NONE|AIR|cross the attainments. slyly |
+5318|61|6|1|13|12493.78|0.10|0.04|R|F|1993-07-15|1993-06-25|1993-08-13|COLLECT COD|REG AIR|ly silent ideas. ideas haggle among the |
+5318|180|1|2|26|28084.68|0.00|0.04|R|F|1993-07-07|1993-05-23|1993-07-28|COLLECT COD|SHIP|al, express foxes. bold requests sleep alwa|
+5318|7|10|3|37|33559.00|0.07|0.05|A|F|1993-07-09|1993-06-22|1993-07-21|COLLECT COD|SHIP|ickly final deposi|
+5318|142|5|4|31|32306.34|0.01|0.04|R|F|1993-07-28|1993-05-06|1993-08-06|DELIVER IN PERSON|REG AIR|requests must sleep slyly quickly|
+5319|150|9|1|31|32554.65|0.04|0.07|N|O|1996-03-26|1996-03-07|1996-04-24|COLLECT COD|TRUCK|d carefully about the courts. fluffily spe|
+5319|44|3|2|39|36817.56|0.09|0.05|N|O|1996-05-17|1996-03-14|1996-06-11|NONE|TRUCK|unts. furiously silent|
+5344|19|3|1|6|5514.06|0.07|0.01|N|O|1998-08-04|1998-09-03|1998-08-11|TAKE BACK RETURN|REG AIR|ithely about the pending plate|
+5344|79|9|2|37|36225.59|0.03|0.07|N|O|1998-10-09|1998-07-26|1998-11-08|NONE|TRUCK|thely express packages|
+5344|67|8|3|26|25143.56|0.02|0.06|N|O|1998-08-27|1998-08-22|1998-09-24|NONE|AIR|furiously pending, silent multipliers.|
+5344|39|10|4|21|19719.63|0.03|0.01|N|O|1998-08-31|1998-09-06|1998-09-02|NONE|MAIL|xes. furiously even pinto beans sleep f|
+5345|83|4|1|3|2949.24|0.05|0.01|N|O|1997-12-10|1997-10-03|1998-01-05|COLLECT COD|SHIP|ites wake carefully unusual |
+5345|146|5|2|2|2092.28|0.10|0.02|N|O|1997-11-18|1997-10-12|1997-12-08|NONE|MAIL|ut the slyly specia|
+5345|192|5|3|46|50240.74|0.06|0.04|N|O|1997-10-06|1997-09-27|1997-10-18|COLLECT COD|REG AIR|slyly special deposits. fin|
+5345|114|4|4|37|37522.07|0.01|0.01|N|O|1997-11-01|1997-10-09|1997-11-26|DELIVER IN PERSON|AIR| along the ironically fina|
+5345|34|10|5|22|20548.66|0.02|0.02|N|O|1997-08-27|1997-11-22|1997-09-10|TAKE BACK RETURN|MAIL|leep slyly regular fox|
+5346|149|8|1|21|22031.94|0.07|0.08|R|F|1994-03-11|1994-03-07|1994-04-04|DELIVER IN PERSON|RAIL|integrate blithely a|
+5346|192|5|2|13|14198.47|0.04|0.04|A|F|1994-02-03|1994-02-05|1994-02-09|COLLECT COD|TRUCK|y. fluffily bold accounts grow. furio|
+5346|109|2|3|7|7063.70|0.08|0.05|A|F|1994-01-30|1994-03-26|1994-01-31|DELIVER IN PERSON|SHIP|equests use carefully care|
+5346|162|3|4|35|37175.60|0.06|0.02|A|F|1994-02-09|1994-03-01|1994-02-14|TAKE BACK RETURN|FOB|nic excuses cajole entic|
+5346|121|2|5|25|25528.00|0.05|0.06|R|F|1993-12-28|1994-03-19|1994-01-09|TAKE BACK RETURN|REG AIR|he ironic ideas are boldly slyly ironi|
+5346|33|9|6|6|5598.18|0.08|0.04|R|F|1994-03-01|1994-02-04|1994-03-09|NONE|REG AIR|escapades sleep furiously beside the |
+5346|80|9|7|41|40183.28|0.05|0.04|R|F|1994-01-10|1994-02-15|1994-01-26|TAKE BACK RETURN|REG AIR|fully close instructi|
+5347|83|4|1|48|47187.84|0.04|0.08|A|F|1995-02-25|1995-04-26|1995-03-26|NONE|SHIP|equests are slyly. blithely regu|
+5347|124|3|2|47|48133.64|0.02|0.01|N|F|1995-06-05|1995-03-29|1995-06-28|COLLECT COD|AIR|across the slyly bol|
+5347|23|2|3|34|31382.68|0.06|0.00|A|F|1995-05-18|1995-04-04|1995-06-02|DELIVER IN PERSON|SHIP| pending deposits. fluffily regular senti|
+5347|40|1|4|4|3760.16|0.06|0.03|A|F|1995-03-24|1995-04-03|1995-04-01|NONE|SHIP|ldly pending asymptotes ki|
+5347|131|2|5|21|21653.73|0.08|0.04|R|F|1995-04-01|1995-04-16|1995-04-23|NONE|SHIP|sly slyly final requests. careful|
+5347|56|1|6|6|5736.30|0.06|0.02|A|F|1995-04-11|1995-04-14|1995-05-02|NONE|TRUCK|lly unusual ideas. sl|
+5347|50|7|7|18|17100.90|0.01|0.01|N|F|1995-05-24|1995-05-07|1995-06-19|NONE|FOB|he ideas among the requests |
+5348|69|4|1|21|20350.26|0.10|0.04|N|O|1997-12-11|1997-12-24|1997-12-28|NONE|REG AIR| regular theodolites haggle car|
+5348|156|1|2|31|32740.65|0.07|0.02|N|O|1998-01-04|1997-12-09|1998-01-17|COLLECT COD|RAIL|are finally|
+5348|17|8|3|16|14672.16|0.06|0.08|N|O|1998-02-28|1997-12-25|1998-03-12|DELIVER IN PERSON|AIR|uriously thin pinto beans |
+5348|20|4|4|7|6440.14|0.04|0.00|N|O|1998-01-29|1997-12-20|1998-02-10|DELIVER IN PERSON|RAIL|even foxes. epitap|
+5348|2|5|5|37|33374.00|0.06|0.07|N|O|1997-12-01|1998-02-02|1997-12-07|NONE|FOB|y according to the carefully pending acco|
+5348|143|10|6|14|14603.96|0.06|0.05|N|O|1997-12-16|1998-01-12|1997-12-24|COLLECT COD|FOB|en pinto beans. somas cajo|
+5349|156|7|1|19|20066.85|0.06|0.01|N|O|1996-09-11|1996-11-18|1996-09-22|TAKE BACK RETURN|FOB|endencies use whithout the special |
+5349|168|3|2|14|14954.24|0.06|0.00|N|O|1996-11-07|1996-11-17|1996-11-20|TAKE BACK RETURN|TRUCK|fully regular |
+5349|4|5|3|6|5424.00|0.10|0.01|N|O|1996-12-30|1996-10-08|1997-01-01|DELIVER IN PERSON|MAIL|inal deposits affix carefully|
+5350|122|3|1|19|19420.28|0.02|0.06|R|F|1993-10-20|1993-11-15|1993-11-17|DELIVER IN PERSON|RAIL|romise slyly alongsi|
+5350|191|4|2|44|48012.36|0.04|0.06|R|F|1993-10-30|1993-11-23|1993-11-25|DELIVER IN PERSON|AIR|p above the ironic, pending dep|
+5350|54|9|3|12|11448.60|0.10|0.04|A|F|1994-01-30|1993-11-21|1994-02-15|COLLECT COD|REG AIR| cajole. even instructions haggle. blithe|
+5350|155|10|4|7|7386.05|0.08|0.00|R|F|1993-10-19|1993-12-28|1993-11-04|NONE|SHIP|alongside of th|
+5350|129|10|5|27|27786.24|0.07|0.04|A|F|1993-11-25|1993-12-27|1993-12-08|COLLECT COD|TRUCK|es. blithe theodolites haggl|
+5351|7|2|1|36|32652.00|0.06|0.05|N|O|1998-07-27|1998-07-06|1998-08-25|NONE|MAIL|ss the ironic, regular asymptotes cajole |
+5351|33|9|2|47|43852.41|0.04|0.01|N|O|1998-05-30|1998-08-08|1998-06-23|DELIVER IN PERSON|REG AIR|s. grouches cajole. sile|
+5351|106|3|3|2|2012.20|0.00|0.02|N|O|1998-05-12|1998-07-15|1998-05-24|NONE|TRUCK|g accounts wake furiously slyly even dolph|
+5376|61|6|1|42|40364.52|0.10|0.04|A|F|1994-09-20|1994-08-30|1994-09-29|TAKE BACK RETURN|REG AIR|y even asymptotes. courts are unusual pa|
+5376|91|4|2|44|43607.96|0.05|0.02|R|F|1994-08-30|1994-08-05|1994-09-07|COLLECT COD|AIR|ithe packages detect final theodolites. f|
+5376|65|6|3|18|17371.08|0.02|0.08|A|F|1994-10-29|1994-09-13|1994-11-01|COLLECT COD|MAIL| accounts boo|
+5377|79|8|1|40|39162.80|0.00|0.04|N|O|1997-05-21|1997-06-15|1997-05-26|DELIVER IN PERSON|AIR|lithely ironic theodolites are care|
+5377|30|3|2|17|15810.51|0.09|0.00|N|O|1997-07-05|1997-05-25|1997-07-22|COLLECT COD|RAIL|dencies. carefully regular re|
+5377|103|8|3|23|23071.30|0.07|0.08|N|O|1997-06-26|1997-07-13|1997-07-08|COLLECT COD|RAIL| silent wa|
+5377|104|7|4|12|12049.20|0.05|0.07|N|O|1997-05-08|1997-06-15|1997-05-15|DELIVER IN PERSON|MAIL| ironic, final|
+5377|173|3|5|27|28975.59|0.08|0.02|N|O|1997-07-11|1997-06-12|1997-08-08|TAKE BACK RETURN|MAIL|press theodolites. e|
+5378|155|3|1|39|41150.85|0.07|0.04|R|F|1992-11-25|1992-12-22|1992-12-02|COLLECT COD|AIR|ts are quickly around the|
+5378|62|9|2|46|44254.76|0.01|0.04|A|F|1993-02-17|1993-01-20|1993-02-26|COLLECT COD|REG AIR|into beans sleep. fu|
+5378|10|7|3|18|16380.18|0.02|0.03|R|F|1992-11-25|1992-12-21|1992-12-10|COLLECT COD|FOB|onic accounts was bold, |
+5379|199|1|1|40|43967.60|0.01|0.08|N|O|1995-10-01|1995-10-19|1995-10-30|COLLECT COD|MAIL|carefully final accounts haggle blithely. |
+5380|182|3|1|14|15150.52|0.10|0.01|N|O|1997-12-18|1997-12-03|1998-01-06|NONE|RAIL|final platelets.|
+5380|147|6|2|10|10471.40|0.09|0.05|N|O|1997-11-24|1998-01-10|1997-12-21|COLLECT COD|AIR|refully pending deposits. special, even t|
+5380|184|5|3|40|43367.20|0.02|0.08|N|O|1997-12-30|1997-11-27|1998-01-09|DELIVER IN PERSON|SHIP|ar asymptotes. blithely r|
+5380|66|3|4|6|5796.36|0.09|0.05|N|O|1997-11-15|1998-01-08|1997-12-11|COLLECT COD|MAIL|es. fluffily brave accounts across t|
+5380|107|8|5|48|48340.80|0.04|0.03|N|O|1997-12-01|1997-12-28|1997-12-05|DELIVER IN PERSON|FOB|encies haggle car|
+5381|188|9|1|37|40262.66|0.04|0.01|A|F|1993-04-08|1993-04-07|1993-04-12|DELIVER IN PERSON|SHIP|ly final deposits print carefully. unusua|
+5381|111|8|2|48|48533.28|0.04|0.03|R|F|1993-04-22|1993-04-17|1993-05-14|TAKE BACK RETURN|FOB|luffily spec|
+5381|192|3|3|13|14198.47|0.08|0.03|R|F|1993-05-09|1993-04-26|1993-05-25|NONE|FOB|s after the f|
+5381|168|3|4|17|18158.72|0.05|0.05|R|F|1993-05-25|1993-04-14|1993-06-17|NONE|MAIL|ckly final requests haggle qui|
+5381|63|8|5|49|47189.94|0.06|0.02|R|F|1993-05-08|1993-04-07|1993-06-03|NONE|FOB| accounts. regular, regula|
+5381|132|3|6|33|34060.29|0.10|0.00|A|F|1993-04-09|1993-04-03|1993-04-22|DELIVER IN PERSON|SHIP|ly special deposits |
+5381|44|3|7|31|29265.24|0.04|0.05|A|F|1993-04-10|1993-03-22|1993-04-13|TAKE BACK RETURN|MAIL|the carefully expre|
+5382|153|8|1|34|35807.10|0.03|0.03|R|F|1992-02-22|1992-02-18|1992-03-02|DELIVER IN PERSON|FOB|gular accounts. even accounts integrate|
+5382|55|3|2|13|12415.65|0.09|0.06|A|F|1992-01-16|1992-03-12|1992-02-06|NONE|MAIL|eodolites. final foxes |
+5382|149|10|3|3|3147.42|0.10|0.06|A|F|1992-03-22|1992-03-06|1992-04-19|TAKE BACK RETURN|AIR|efully unusua|
+5382|62|9|4|20|19241.20|0.08|0.02|A|F|1992-03-26|1992-02-17|1992-04-15|DELIVER IN PERSON|FOB|carefully regular accounts. slyly ev|
+5382|177|8|5|14|15080.38|0.02|0.02|A|F|1992-04-05|1992-04-05|1992-05-04|TAKE BACK RETURN|FOB| brave platelets. ev|
+5382|180|9|6|6|6481.08|0.02|0.01|A|F|1992-03-07|1992-04-02|1992-03-18|TAKE BACK RETURN|FOB|y final foxes by the sl|
+5382|105|2|7|48|48244.80|0.05|0.05|A|F|1992-02-14|1992-03-19|1992-02-25|DELIVER IN PERSON|REG AIR|nts integrate quickly ca|
+5383|96|7|1|12|11953.08|0.04|0.00|N|O|1995-07-02|1995-08-16|1995-08-01|TAKE BACK RETURN|AIR|y regular instructi|
+5408|102|7|1|2|2004.20|0.07|0.04|R|F|1992-08-21|1992-10-03|1992-08-28|DELIVER IN PERSON|MAIL|cross the dolphins h|
+5408|118|2|2|35|35633.85|0.04|0.05|R|F|1992-10-02|1992-10-17|1992-10-13|TAKE BACK RETURN|AIR|thely ironic requests alongside of the sl|
+5408|76|6|3|34|33186.38|0.10|0.02|A|F|1992-10-22|1992-08-25|1992-11-16|DELIVER IN PERSON|TRUCK|requests detect blithely a|
+5408|54|2|4|48|45794.40|0.04|0.05|R|F|1992-09-30|1992-08-27|1992-10-27|NONE|TRUCK|. furiously regular |
+5408|183|4|5|8|8665.44|0.03|0.07|A|F|1992-10-24|1992-09-06|1992-11-03|NONE|AIR|thely regular hocke|
+5409|194|8|1|27|29543.13|0.01|0.02|A|F|1992-02-14|1992-03-18|1992-02-23|DELIVER IN PERSON|AIR|eodolites |
+5409|104|5|2|38|38155.80|0.01|0.02|A|F|1992-03-17|1992-03-29|1992-04-13|NONE|REG AIR|onic, regular accounts! blithely even|
+5409|141|10|3|17|17699.38|0.07|0.00|A|F|1992-01-13|1992-04-05|1992-01-20|DELIVER IN PERSON|AIR|cross the sil|
+5409|1|8|4|9|8109.00|0.07|0.03|A|F|1992-02-15|1992-04-02|1992-02-28|DELIVER IN PERSON|AIR| unusual, unusual reques|
+5409|159|10|5|37|39188.55|0.06|0.04|R|F|1992-05-07|1992-02-10|1992-05-20|DELIVER IN PERSON|FOB|ously regular packages. packages|
+5409|64|3|6|14|13496.84|0.03|0.08|R|F|1992-02-14|1992-03-26|1992-02-29|DELIVER IN PERSON|AIR|osits cajole furiously|
+5410|117|8|1|48|48821.28|0.04|0.08|N|O|1998-09-27|1998-09-11|1998-10-01|TAKE BACK RETURN|AIR| about the slyly even courts. quickly regul|
+5410|105|8|2|41|41209.10|0.01|0.07|N|O|1998-08-25|1998-10-20|1998-09-01|DELIVER IN PERSON|REG AIR|sly. slyly ironic theodolites|
+5410|29|4|3|40|37160.80|0.07|0.08|N|O|1998-11-17|1998-10-02|1998-11-27|COLLECT COD|TRUCK|iously special accounts are along th|
+5410|50|7|4|8|7600.40|0.05|0.04|N|O|1998-09-12|1998-10-22|1998-09-22|DELIVER IN PERSON|TRUCK|ly. fluffily ironic platelets alon|
+5411|96|9|1|17|16933.53|0.05|0.01|N|O|1997-07-22|1997-07-14|1997-07-30|TAKE BACK RETURN|REG AIR| slyly slyly even deposits. carefully b|
+5411|113|7|2|10|10131.10|0.08|0.01|N|O|1997-07-19|1997-08-04|1997-07-26|TAKE BACK RETURN|MAIL|nding, special foxes unw|
+5411|56|7|3|5|4780.25|0.10|0.01|N|O|1997-09-12|1997-08-03|1997-09-23|DELIVER IN PERSON|FOB| bold, ironic theodo|
+5411|129|8|4|15|15436.80|0.08|0.05|N|O|1997-07-01|1997-07-15|1997-07-07|COLLECT COD|RAIL|attainments sleep slyly ironic|
+5411|4|5|5|19|17176.00|0.05|0.08|N|O|1997-05-25|1997-07-30|1997-06-19|COLLECT COD|RAIL|ial accounts according to the f|
+5412|54|9|1|2|1908.10|0.03|0.07|N|O|1998-04-14|1998-04-02|1998-04-19|TAKE BACK RETURN|REG AIR| sleep above the furiou|
+5412|66|1|2|48|46370.88|0.01|0.08|N|O|1998-02-22|1998-03-28|1998-03-18|TAKE BACK RETURN|TRUCK|s. slyly final packages cajole blithe|
+5412|74|2|3|31|30196.17|0.05|0.08|N|O|1998-03-23|1998-04-17|1998-04-10|NONE|SHIP|t the accounts detect slyly about the c|
+5412|97|10|4|26|25924.34|0.02|0.08|N|O|1998-01-22|1998-04-19|1998-02-17|NONE|AIR| the blithel|
+5413|126|7|1|48|49253.76|0.02|0.08|N|O|1998-01-25|1997-11-20|1998-02-22|COLLECT COD|SHIP| theodolites. furiously ironic instr|
+5413|142|9|2|37|38559.18|0.02|0.07|N|O|1997-12-08|1998-01-01|1997-12-13|COLLECT COD|TRUCK|usly bold instructions affix idly unusual, |
+5413|111|8|3|36|36399.96|0.02|0.07|N|O|1997-12-12|1997-11-28|1997-12-25|NONE|TRUCK|ular, regular ideas mold! final requests|
+5413|110|3|4|22|22222.42|0.02|0.08|N|O|1997-11-10|1997-11-24|1997-11-22|DELIVER IN PERSON|FOB|posits. quick|
+5413|189|10|5|5|5445.90|0.10|0.01|N|O|1997-11-28|1997-11-24|1997-12-05|NONE|RAIL|tes are al|
+5413|190|1|6|32|34886.08|0.02|0.03|N|O|1997-10-28|1998-01-03|1997-11-10|NONE|TRUCK|refully special package|
+5413|31|7|7|32|29792.96|0.06|0.07|N|O|1997-10-23|1997-12-09|1997-11-17|NONE|TRUCK|he quickly ironic ideas. slyly ironic ide|
+5414|68|9|1|40|38722.40|0.07|0.06|R|F|1993-04-07|1993-05-18|1993-04-23|COLLECT COD|AIR|ts are evenly across|
+5414|123|8|2|48|49109.76|0.06|0.07|R|F|1993-06-08|1993-05-14|1993-07-06|DELIVER IN PERSON|FOB| silent dolphins; fluffily regular tithe|
+5414|35|1|3|23|21505.69|0.10|0.00|A|F|1993-07-22|1993-05-26|1993-08-08|COLLECT COD|MAIL|e bold, express dolphins. spec|
+5414|133|4|4|15|15496.95|0.06|0.08|R|F|1993-05-18|1993-06-09|1993-05-27|DELIVER IN PERSON|REG AIR|e slyly about the carefully regula|
+5414|9|2|5|19|17271.00|0.01|0.05|R|F|1993-04-06|1993-05-12|1993-05-02|DELIVER IN PERSON|RAIL|ffily silent theodolites na|
+5414|98|1|6|28|27946.52|0.10|0.05|A|F|1993-03-27|1993-06-04|1993-04-07|TAKE BACK RETURN|SHIP|ts sleep sl|
+5415|102|5|1|44|44092.40|0.00|0.06|A|F|1992-08-19|1992-10-26|1992-09-17|TAKE BACK RETURN|TRUCK| requests. unusual theodolites sleep agains|
+5415|31|7|2|16|14896.48|0.08|0.00|A|F|1992-09-29|1992-09-12|1992-10-10|NONE|AIR|pinto beans haggle furiously|
+5415|102|7|3|6|6012.60|0.10|0.03|A|F|1992-10-28|1992-09-09|1992-11-20|COLLECT COD|RAIL|ges around the fur|
+5415|16|7|4|43|39388.43|0.01|0.02|R|F|1992-11-17|1992-09-14|1992-12-14|DELIVER IN PERSON|SHIP|yly blithely stealthy deposits. carefu|
+5415|161|6|5|11|11672.76|0.00|0.01|R|F|1992-11-22|1992-10-19|1992-12-10|DELIVER IN PERSON|SHIP|gle among t|
+5415|144|1|6|46|48030.44|0.03|0.03|R|F|1992-08-25|1992-09-10|1992-09-22|DELIVER IN PERSON|REG AIR|ve the fluffily |
+5415|153|4|7|11|11584.65|0.08|0.06|A|F|1992-08-21|1992-09-04|1992-08-23|NONE|TRUCK|unts maintain carefully unusual|
+5440|115|2|1|3|3045.33|0.02|0.08|N|O|1997-02-18|1997-02-28|1997-03-15|NONE|SHIP|y. accounts haggle along the blit|
+5441|164|1|1|3|3192.48|0.00|0.02|R|F|1994-08-12|1994-10-14|1994-09-01|TAKE BACK RETURN|REG AIR|are. unusual, |
+5441|131|2|2|49|50525.37|0.02|0.03|A|F|1994-09-23|1994-09-22|1994-10-22|NONE|FOB|ording to the furio|
+5441|144|3|3|33|34456.62|0.09|0.02|R|F|1994-10-09|1994-10-06|1994-10-30|DELIVER IN PERSON|TRUCK|ges. final instruction|
+5441|67|4|4|47|45451.82|0.07|0.08|R|F|1994-11-19|1994-10-16|1994-12-16|TAKE BACK RETURN|FOB|ounts wake slyly about the express instr|
+5442|42|5|1|16|15072.64|0.00|0.00|N|O|1998-04-12|1998-03-03|1998-05-04|TAKE BACK RETURN|RAIL|r packages. accounts haggle dependencies. f|
+5442|88|9|2|45|44463.60|0.08|0.01|N|O|1998-03-30|1998-02-24|1998-04-18|TAKE BACK RETURN|AIR|old slyly after |
+5442|61|8|3|12|11532.72|0.01|0.08|N|O|1998-04-15|1998-03-18|1998-05-05|DELIVER IN PERSON|TRUCK|fully final|
+5442|158|9|4|21|22221.15|0.07|0.06|N|O|1998-03-13|1998-02-19|1998-04-06|COLLECT COD|MAIL|ffily furiously ironic theodolites. furio|
+5442|16|7|5|25|22900.25|0.04|0.00|N|O|1998-03-29|1998-02-13|1998-04-13|TAKE BACK RETURN|REG AIR|ake furiously. slyly express th|
+5442|144|3|6|26|27147.64|0.08|0.07|N|O|1998-03-21|1998-03-21|1998-03-25|TAKE BACK RETURN|AIR|have to sleep furiously bold ideas. blith|
+5443|178|9|1|14|15094.38|0.02|0.00|N|O|1996-10-27|1996-11-11|1996-11-21|DELIVER IN PERSON|RAIL|s after the regular, regular deposits hag|
+5443|72|3|2|39|37910.73|0.03|0.07|N|O|1996-11-01|1996-11-30|1996-11-19|NONE|RAIL|gage carefully across the furiously|
+5443|160|5|3|25|26504.00|0.05|0.00|N|O|1996-12-07|1997-01-08|1997-01-05|NONE|FOB|use carefully above the pinto bea|
+5443|191|4|4|6|6547.14|0.05|0.02|N|O|1996-11-17|1996-12-03|1996-11-30|TAKE BACK RETURN|AIR|p fluffily foxe|
+5443|83|4|5|40|39323.20|0.03|0.03|N|O|1997-01-28|1996-12-10|1997-02-13|NONE|FOB|n courts. special re|
+5444|186|7|1|21|22809.78|0.01|0.07|A|F|1995-04-11|1995-04-25|1995-04-21|DELIVER IN PERSON|RAIL|ar packages haggle above th|
+5444|43|6|2|40|37721.60|0.05|0.08|N|O|1995-07-09|1995-04-25|1995-07-19|COLLECT COD|TRUCK|ously bold ideas. instructions wake slyl|
+5444|150|9|3|40|42006.00|0.08|0.01|A|F|1995-04-06|1995-05-08|1995-05-06|DELIVER IN PERSON|AIR| even packages.|
+5444|59|4|4|33|31648.65|0.05|0.04|N|O|1995-06-24|1995-04-24|1995-07-13|DELIVER IN PERSON|SHIP|ut the courts cajole blithely excuses|
+5444|171|9|5|21|22494.57|0.04|0.00|R|F|1995-05-05|1995-05-25|1995-05-29|TAKE BACK RETURN|REG AIR|aves serve sly|
+5444|20|7|6|21|19320.42|0.07|0.01|A|F|1995-03-30|1995-05-01|1995-03-31|COLLECT COD|AIR|furiously even theodolites.|
+5445|90|1|1|33|32672.97|0.08|0.07|A|F|1993-10-21|1993-10-14|1993-10-28|DELIVER IN PERSON|REG AIR|ests. final instructions|
+5445|131|2|2|12|12373.56|0.09|0.08|R|F|1993-11-02|1993-09-05|1993-11-26|COLLECT COD|FOB| slyly pending pinto beans was slyly al|
+5445|103|8|3|46|46142.60|0.04|0.07|A|F|1993-10-06|1993-09-15|1993-10-28|DELIVER IN PERSON|RAIL|old depend|
+5445|149|10|4|10|10491.40|0.08|0.06|A|F|1993-09-16|1993-10-05|1993-10-01|NONE|TRUCK|ncies abou|
+5445|13|10|5|14|12782.14|0.00|0.02|R|F|1993-11-19|1993-10-18|1993-12-07|NONE|RAIL| requests. bravely i|
+5446|190|1|1|27|29435.13|0.05|0.07|R|F|1994-07-21|1994-08-25|1994-08-17|TAKE BACK RETURN|RAIL|ously across the quic|
+5447|99|1|1|31|30971.79|0.09|0.03|N|O|1996-07-14|1996-05-07|1996-07-17|COLLECT COD|SHIP| foxes sleep. blithely unusual accounts det|
+5472|59|10|1|27|25894.35|0.09|0.06|A|F|1993-08-04|1993-07-07|1993-09-03|COLLECT COD|TRUCK|fily pending attainments. unus|
+5472|68|3|2|28|27105.68|0.00|0.03|A|F|1993-07-28|1993-05-28|1993-08-11|TAKE BACK RETURN|FOB|ffily pendin|
+5472|178|7|3|45|48517.65|0.06|0.02|R|F|1993-06-05|1993-05-14|1993-06-10|NONE|TRUCK| idle packages. furi|
+5472|184|5|4|37|40114.66|0.07|0.05|R|F|1993-06-15|1993-07-03|1993-07-09|DELIVER IN PERSON|RAIL|egrate carefully dependencies. |
+5472|75|6|5|40|39002.80|0.02|0.05|A|F|1993-04-13|1993-07-04|1993-05-04|NONE|REG AIR|e requests detect furiously. ruthlessly un|
+5472|167|2|6|39|41619.24|0.02|0.03|R|F|1993-04-18|1993-07-10|1993-05-12|TAKE BACK RETURN|MAIL|uriously carefully |
+5472|15|5|7|1|915.01|0.03|0.02|A|F|1993-04-14|1993-06-28|1993-04-16|NONE|RAIL|s use furiou|
+5473|48|5|1|9|8532.36|0.03|0.07|R|F|1992-06-03|1992-05-30|1992-06-09|TAKE BACK RETURN|AIR| excuses sleep blithely! regular dep|
+5473|70|1|2|27|26191.89|0.01|0.03|A|F|1992-04-06|1992-04-26|1992-04-29|TAKE BACK RETURN|MAIL|the deposits. warthogs wake fur|
+5473|15|5|3|33|30195.33|0.09|0.00|R|F|1992-05-18|1992-06-10|1992-06-13|TAKE BACK RETURN|MAIL|efully above the even, |
+5474|184|5|1|38|41198.84|0.01|0.08|A|F|1992-07-15|1992-07-16|1992-07-20|NONE|REG AIR| slyly beneath |
+5474|94|8|2|10|9940.90|0.06|0.00|R|F|1992-08-08|1992-08-10|1992-08-24|TAKE BACK RETURN|TRUCK|pinto bean|
+5474|48|1|3|31|29389.24|0.00|0.08|R|F|1992-08-02|1992-07-12|1992-08-04|NONE|TRUCK|the furiously express ideas. speci|
+5474|90|1|4|46|45544.14|0.03|0.04|A|F|1992-06-07|1992-07-11|1992-06-22|NONE|TRUCK|nstructions. furio|
+5475|183|4|1|10|10831.80|0.09|0.08|N|O|1996-07-19|1996-08-22|1996-07-23|COLLECT COD|AIR|ding to the deposits wake fina|
+5476|48|1|1|13|12324.52|0.01|0.04|N|O|1997-12-27|1997-12-08|1997-12-29|COLLECT COD|TRUCK|iously special ac|
+5476|20|4|2|17|15640.34|0.10|0.01|N|O|1998-02-02|1998-01-28|1998-02-14|COLLECT COD|FOB|ng dependencies until the f|
+5477|80|8|1|20|19601.60|0.03|0.01|N|O|1998-03-21|1998-02-09|1998-04-07|TAKE BACK RETURN|SHIP|platelets about the ironic|
+5477|77|7|2|21|20518.47|0.03|0.00|N|O|1998-01-28|1998-02-15|1998-02-24|TAKE BACK RETURN|SHIP|blate slyly. silent|
+5477|134|5|3|31|32058.03|0.04|0.01|N|O|1998-01-11|1998-01-30|1998-02-04|DELIVER IN PERSON|MAIL| special Tiresias cajole furiously. pending|
+5477|193|6|4|16|17491.04|0.00|0.01|N|O|1998-03-07|1998-03-12|1998-04-06|COLLECT COD|RAIL|regular, s|
+5477|96|9|5|23|22910.07|0.00|0.06|N|O|1998-01-04|1998-02-23|1998-01-24|NONE|REG AIR|telets wake blithely ab|
+5477|121|6|6|19|19401.28|0.10|0.03|N|O|1998-02-03|1998-01-30|1998-03-04|TAKE BACK RETURN|MAIL|ost carefully packages.|
+5478|8|9|1|39|35412.00|0.09|0.06|N|O|1996-08-19|1996-06-25|1996-09-08|DELIVER IN PERSON|SHIP|s. furiously |
+5478|2|5|2|47|42394.00|0.10|0.01|N|O|1996-08-15|1996-07-12|1996-08-31|NONE|RAIL| instructions; slyly even accounts hagg|
+5478|119|3|3|25|25477.75|0.09|0.07|N|O|1996-06-08|1996-07-12|1996-07-07|NONE|TRUCK|unusual, pending requests haggle accoun|
+5479|138|4|1|50|51906.50|0.02|0.02|A|F|1993-12-24|1994-02-14|1994-01-18|DELIVER IN PERSON|MAIL|ironic gifts. even dependencies sno|
+5479|104|5|2|19|19077.90|0.05|0.03|A|F|1994-01-22|1994-03-07|1994-02-11|DELIVER IN PERSON|SHIP|arefully bo|
+5504|68|5|1|4|3872.24|0.10|0.07|A|F|1993-04-30|1993-03-01|1993-05-22|DELIVER IN PERSON|AIR|into beans boost. |
+5504|177|8|2|7|7540.19|0.03|0.05|R|F|1993-04-25|1993-03-15|1993-05-06|NONE|TRUCK|packages detect furiously express reques|
+5504|160|2|3|29|30744.64|0.05|0.03|A|F|1993-01-28|1993-02-13|1993-02-27|NONE|SHIP|ajole carefully. care|
+5505|25|8|1|43|39775.86|0.07|0.01|N|O|1997-12-30|1997-11-28|1998-01-09|TAKE BACK RETURN|TRUCK|y alongside of the special requests.|
+5505|182|3|2|33|35711.94|0.05|0.08|N|O|1998-01-11|1997-11-11|1998-01-30|TAKE BACK RETURN|AIR|ithely unusual excuses integrat|
+5505|155|10|3|10|10551.50|0.06|0.01|N|O|1997-10-28|1997-11-27|1997-10-29|DELIVER IN PERSON|AIR| furiously special asym|
+5505|40|1|4|18|16920.72|0.04|0.04|N|O|1997-10-25|1997-12-12|1997-10-30|TAKE BACK RETURN|RAIL| to the quickly express pac|
+5505|162|9|5|46|48859.36|0.05|0.00|N|O|1998-01-06|1997-11-04|1998-02-04|TAKE BACK RETURN|SHIP|usly ironic dependencies haggle across |
+5506|140|1|1|2|2080.28|0.00|0.03|R|F|1994-02-04|1994-01-13|1994-02-17|COLLECT COD|MAIL|onic theodolites are fluffil|
+5506|160|1|2|6|6360.96|0.07|0.06|R|F|1994-02-21|1994-01-30|1994-02-27|DELIVER IN PERSON|MAIL|hely according to the furiously unusua|
+5507|10|5|1|23|20930.23|0.05|0.04|N|O|1998-09-04|1998-07-04|1998-09-18|TAKE BACK RETURN|AIR|ously slow packages poach whithout the|
+5507|138|9|2|48|49830.24|0.03|0.01|N|O|1998-08-03|1998-08-10|1998-08-24|DELIVER IN PERSON|AIR|yly idle deposits. final, final fox|
+5507|45|2|3|4|3780.16|0.04|0.06|N|O|1998-06-06|1998-07-02|1998-06-27|TAKE BACK RETURN|RAIL|into beans are|
+5507|67|6|4|22|21275.32|0.07|0.01|N|O|1998-07-08|1998-08-10|1998-07-22|DELIVER IN PERSON|TRUCK|gular ideas. carefully unu|
+5507|132|3|5|48|49542.24|0.06|0.01|N|O|1998-07-21|1998-07-15|1998-07-31|DELIVER IN PERSON|SHIP|uriously regular acc|
+5508|117|7|1|4|4068.44|0.10|0.04|N|O|1996-09-01|1996-08-02|1996-09-17|COLLECT COD|AIR|fluffily about the even |
+5509|197|10|1|3|3291.57|0.03|0.02|A|F|1994-06-14|1994-05-11|1994-06-17|NONE|SHIP| quickly fin|
+5509|99|3|2|17|16984.53|0.03|0.07|R|F|1994-07-01|1994-06-30|1994-07-31|COLLECT COD|AIR|ccounts wake ar|
+5509|93|7|3|30|29792.70|0.04|0.04|A|F|1994-07-23|1994-06-01|1994-08-08|NONE|AIR|counts haggle pinto beans. furiously |
+5509|100|3|4|45|45004.50|0.00|0.07|A|F|1994-07-24|1994-05-28|1994-08-20|COLLECT COD|AIR|counts sleep. f|
+5509|156|8|5|35|36965.25|0.04|0.03|A|F|1994-04-17|1994-06-29|1994-04-24|COLLECT COD|RAIL|c accounts. ca|
+5510|16|6|1|8|7328.08|0.01|0.01|A|F|1993-03-16|1993-03-29|1993-03-24|DELIVER IN PERSON|FOB|n packages boost sly|
+5510|20|10|2|46|42320.92|0.02|0.07|A|F|1993-03-12|1993-02-09|1993-03-19|NONE|TRUCK|silent packages cajole doggedly regular |
+5510|162|3|3|47|49921.52|0.03|0.01|A|F|1993-01-20|1993-03-25|1993-02-15|DELIVER IN PERSON|SHIP|riously even requests. slyly bold accou|
+5510|24|7|4|29|26796.58|0.09|0.08|A|F|1993-02-28|1993-03-28|1993-03-12|COLLECT COD|AIR|lithely fluffily ironic req|
+5511|165|4|1|16|17042.56|0.10|0.05|A|F|1995-02-02|1995-01-06|1995-02-19|TAKE BACK RETURN|RAIL|thely bold theodolites |
+5511|165|10|2|31|33019.96|0.09|0.01|A|F|1995-02-23|1995-01-21|1995-03-02|COLLECT COD|REG AIR|gular excuses. fluffily even pinto beans c|
+5511|128|3|3|49|50377.88|0.05|0.05|R|F|1994-12-21|1995-01-27|1994-12-26|NONE|REG AIR|bout the requests. theodolites |
+5511|122|7|4|4|4088.48|0.08|0.02|R|F|1994-12-28|1995-01-16|1995-01-24|TAKE BACK RETURN|RAIL|lphins. carefully blithe de|
+5511|9|2|5|23|20907.00|0.10|0.07|A|F|1995-03-11|1995-01-21|1995-03-27|TAKE BACK RETURN|TRUCK|ing dugouts |
+5511|188|9|6|5|5440.90|0.08|0.05|R|F|1994-12-29|1995-01-16|1995-01-24|DELIVER IN PERSON|MAIL|al theodolites. blithely final de|
+5511|143|2|7|23|23992.22|0.02|0.07|R|F|1995-02-03|1995-01-05|1995-02-18|COLLECT COD|REG AIR|ully deposits. warthogs hagg|
+5536|90|1|1|14|13861.26|0.08|0.06|N|O|1998-05-18|1998-05-08|1998-06-05|COLLECT COD|MAIL|instructions sleep |
+5536|62|1|2|20|19241.20|0.08|0.04|N|O|1998-05-08|1998-05-10|1998-05-31|DELIVER IN PERSON|REG AIR|equests mo|
+5536|197|9|3|35|38401.65|0.07|0.02|N|O|1998-05-19|1998-06-08|1998-06-05|NONE|MAIL|c, final theo|
+5536|9|10|4|30|27270.00|0.05|0.07|N|O|1998-04-15|1998-05-23|1998-05-03|NONE|FOB|arefully regular theodolites according|
+5536|141|2|5|11|11452.54|0.02|0.08|N|O|1998-03-18|1998-05-12|1998-03-28|TAKE BACK RETURN|FOB| snooze furio|
+5537|45|8|1|10|9450.40|0.05|0.08|N|O|1997-01-13|1996-12-25|1997-01-28|TAKE BACK RETURN|AIR| sleep carefully slyly bold depos|
+5537|150|9|2|15|15752.25|0.07|0.04|N|O|1997-01-13|1996-12-25|1997-01-27|COLLECT COD|AIR|eposits. permanently pending packag|
+5537|151|6|3|39|40994.85|0.03|0.00|N|O|1996-12-17|1996-11-08|1997-01-15|COLLECT COD|REG AIR| slyly bold packages are. qu|
+5537|97|1|4|38|37889.42|0.01|0.00|N|O|1996-11-06|1996-11-23|1996-11-12|TAKE BACK RETURN|MAIL|s above the carefully ironic deposits |
+5538|154|9|1|42|44274.30|0.05|0.00|A|F|1994-04-08|1994-03-17|1994-05-05|DELIVER IN PERSON|REG AIR|vely ironic accounts. furiously unusual acc|
+5538|121|2|2|4|4084.48|0.02|0.03|R|F|1994-03-21|1994-02-17|1994-04-11|TAKE BACK RETURN|REG AIR|ithely along the c|
+5538|19|3|3|38|34922.38|0.03|0.06|R|F|1994-03-17|1994-02-11|1994-04-10|TAKE BACK RETURN|FOB|ular pinto beans. silent ideas above |
+5538|78|6|4|9|8802.63|0.00|0.01|R|F|1993-12-26|1994-01-31|1994-01-03|TAKE BACK RETURN|REG AIR|encies across the blithely fina|
+5539|65|10|1|42|40532.52|0.10|0.08|A|F|1994-09-29|1994-09-17|1994-10-20|DELIVER IN PERSON|RAIL|ons across the carefully si|
+5540|181|2|1|42|45409.56|0.02|0.08|N|O|1996-11-12|1996-12-18|1996-12-05|TAKE BACK RETURN|RAIL|ss dolphins haggle |
+5540|102|3|2|2|2004.20|0.06|0.02|N|O|1996-12-12|1997-01-09|1996-12-25|DELIVER IN PERSON|MAIL|nic asymptotes could hav|
+5540|64|3|3|19|18317.14|0.01|0.03|N|O|1997-02-06|1996-11-18|1997-02-20|DELIVER IN PERSON|SHIP| slyly slyl|
+5540|72|10|4|24|23329.68|0.10|0.05|N|O|1997-01-09|1996-12-02|1997-01-23|COLLECT COD|FOB|deposits! ironic depths may engage-- b|
+5541|96|8|1|39|38847.51|0.08|0.05|N|O|1997-11-17|1997-12-27|1997-12-11|TAKE BACK RETURN|RAIL|ding theodolites haggle against the slyly |
+5542|189|10|1|6|6535.08|0.03|0.01|N|O|1996-06-14|1996-05-28|1996-07-11|DELIVER IN PERSON|TRUCK| foxes doubt. theodolites ca|
+5543|143|10|1|14|14603.96|0.02|0.03|R|F|1993-10-09|1993-12-09|1993-10-21|NONE|SHIP|ecial reque|
+5543|162|7|2|22|23367.52|0.04|0.00|A|F|1993-11-06|1993-11-02|1993-12-02|DELIVER IN PERSON|SHIP|instructions. deposits use quickly. ir|
+5543|67|6|3|3|2901.18|0.08|0.05|R|F|1993-11-18|1993-11-05|1993-12-17|NONE|FOB|ress, even |
+5543|147|10|4|8|8377.12|0.05|0.01|R|F|1993-10-28|1993-11-18|1993-11-07|NONE|SHIP|totes? iron|
+5543|80|1|5|32|31362.56|0.03|0.03|R|F|1993-10-04|1993-11-14|1993-11-03|DELIVER IN PERSON|AIR|ully around the |
+5543|184|5|6|1|1084.18|0.03|0.07|A|F|1993-10-29|1993-11-11|1993-11-23|TAKE BACK RETURN|FOB|uriously. slyly|
+5543|129|8|7|39|40135.68|0.06|0.00|R|F|1993-10-07|1993-11-15|1993-10-28|TAKE BACK RETURN|MAIL|l excuses are furiously. slyly unusual requ|
+5568|166|5|1|50|53308.00|0.05|0.05|N|O|1995-07-14|1995-09-04|1995-08-03|COLLECT COD|TRUCK|furious ide|
+5568|44|5|2|18|16992.72|0.01|0.08|N|O|1995-08-19|1995-08-18|1995-08-24|DELIVER IN PERSON|SHIP|structions haggle. carefully regular |
+5568|89|10|3|35|34617.80|0.08|0.07|N|O|1995-09-17|1995-09-04|1995-10-14|NONE|SHIP|lyly. blit|
+5569|29|4|1|25|23225.50|0.10|0.03|R|F|1993-06-29|1993-07-18|1993-07-05|TAKE BACK RETURN|TRUCK| deposits cajole above|
+5569|58|10|2|26|24909.30|0.09|0.06|A|F|1993-08-21|1993-07-22|1993-09-09|DELIVER IN PERSON|MAIL|pitaphs. ironic req|
+5569|55|3|3|48|45842.40|0.02|0.03|R|F|1993-06-16|1993-06-15|1993-07-09|COLLECT COD|SHIP|the fluffily|
+5569|147|10|4|19|19895.66|0.10|0.08|R|F|1993-07-30|1993-06-21|1993-08-13|TAKE BACK RETURN|FOB| detect ca|
+5569|59|1|5|15|14385.75|0.02|0.06|A|F|1993-06-29|1993-07-06|1993-07-05|DELIVER IN PERSON|MAIL|lithely bold requests boost fur|
+5570|161|6|1|37|39262.92|0.08|0.02|N|O|1996-08-29|1996-10-23|1996-09-11|NONE|RAIL|y ironic pin|
+5570|39|10|2|15|14085.45|0.09|0.02|N|O|1996-10-04|1996-10-05|1996-10-28|TAKE BACK RETURN|REG AIR|beans nag slyly special, regular pack|
+5570|60|1|3|29|27841.74|0.02|0.05|N|O|1996-10-12|1996-10-20|1996-11-08|TAKE BACK RETURN|SHIP|he silent, enticing requests.|
+5571|154|2|1|32|33732.80|0.05|0.01|R|F|1992-12-25|1993-03-01|1993-01-23|NONE|FOB| the blithely even packages nag q|
+5571|94|8|2|31|30816.79|0.09|0.07|R|F|1993-01-05|1993-01-18|1993-02-04|DELIVER IN PERSON|SHIP|uffily even accounts. quickly re|
+5571|92|6|3|18|17857.62|0.10|0.05|R|F|1993-03-11|1993-02-28|1993-04-03|COLLECT COD|REG AIR|uests haggle furiously pending d|
+5572|22|1|1|24|22128.48|0.08|0.08|R|F|1994-10-30|1994-10-02|1994-11-27|TAKE BACK RETURN|MAIL|ests cajole. evenly ironic exc|
+5572|172|10|2|27|28948.59|0.03|0.04|A|F|1994-08-29|1994-09-10|1994-08-30|TAKE BACK RETURN|SHIP| accounts. carefully final accoun|
+5572|87|8|3|19|18754.52|0.10|0.00|A|F|1994-08-12|1994-10-07|1994-09-01|DELIVER IN PERSON|RAIL|es. final, final requests wake blithely ag|
+5572|135|1|4|46|47615.98|0.02|0.01|R|F|1994-09-08|1994-10-14|1994-10-01|NONE|REG AIR|ully regular platelet|
+5572|24|3|5|34|31416.68|0.10|0.08|R|F|1994-10-22|1994-08-16|1994-11-08|NONE|TRUCK|asymptotes integrate. s|
+5572|101|2|6|14|14015.40|0.04|0.05|A|F|1994-11-02|1994-09-20|1994-11-03|COLLECT COD|RAIL|he fluffily express packages. fluffily fina|
+5572|26|1|7|24|22224.48|0.01|0.05|R|F|1994-09-26|1994-09-04|1994-10-22|DELIVER IN PERSON|FOB| beans. foxes sleep fluffily across th|
+5573|21|6|1|32|29472.64|0.05|0.07|N|O|1996-09-30|1996-10-25|1996-10-15|DELIVER IN PERSON|RAIL|egular depths haggl|
+5573|50|3|2|2|1900.10|0.01|0.07|N|O|1996-08-26|1996-09-29|1996-09-04|COLLECT COD|TRUCK| even foxes. specia|
+5573|11|8|3|46|41906.46|0.06|0.01|N|O|1996-11-04|1996-10-02|1996-11-15|DELIVER IN PERSON|MAIL|s haggle qu|
+5573|169|4|4|43|45973.88|0.10|0.03|N|O|1996-10-22|1996-11-03|1996-11-02|TAKE BACK RETURN|FOB| furiously pending packages against |
+5573|138|9|5|43|44639.59|0.05|0.04|N|O|1996-09-09|1996-09-24|1996-09-28|COLLECT COD|AIR| bold package|
+5574|185|6|1|46|49918.28|0.02|0.07|A|F|1992-06-20|1992-04-19|1992-07-11|NONE|FOB|arefully express requests wake furiousl|
+5574|33|4|2|21|19593.63|0.05|0.08|A|F|1992-03-22|1992-04-26|1992-04-16|TAKE BACK RETURN|TRUCK|fully final dugouts. express foxes nag |
+5574|119|6|3|27|27515.97|0.10|0.06|R|F|1992-05-08|1992-05-19|1992-06-05|TAKE BACK RETURN|REG AIR|ecial realms. furiously entici|
+5574|94|6|4|14|13917.26|0.09|0.01|R|F|1992-05-20|1992-04-09|1992-05-23|COLLECT COD|REG AIR| use slyly carefully special requests? slyl|
+5574|85|6|5|19|18716.52|0.05|0.03|A|F|1992-05-28|1992-04-24|1992-06-11|TAKE BACK RETURN|REG AIR|old deposits int|
+5575|58|10|1|7|6706.35|0.01|0.07|N|O|1995-10-01|1995-09-30|1995-10-06|NONE|FOB|s. slyly pending theodolites prin|
+5575|31|7|2|23|21413.69|0.04|0.02|N|O|1995-10-26|1995-10-09|1995-11-13|TAKE BACK RETURN|AIR|enticingly final requests. ironically|
+5575|63|8|3|16|15408.96|0.00|0.07|N|O|1995-08-17|1995-10-14|1995-08-30|NONE|RAIL|jole boldly beyond the final as|
+5575|110|1|4|7|7070.77|0.01|0.04|N|O|1995-10-15|1995-09-14|1995-10-18|DELIVER IN PERSON|RAIL|special requests. final, final |
+5600|187|8|1|34|36964.12|0.02|0.00|N|O|1997-03-22|1997-04-05|1997-04-09|TAKE BACK RETURN|MAIL|ly above the stealthy ideas. permane|
+5600|8|5|2|19|17252.00|0.00|0.01|N|O|1997-04-10|1997-03-24|1997-04-16|TAKE BACK RETURN|TRUCK|dencies. carefully p|
+5601|38|4|1|29|27202.87|0.09|0.04|A|F|1992-04-06|1992-02-24|1992-04-29|DELIVER IN PERSON|TRUCK| ironic ideas. final|
+5601|164|1|2|45|47887.20|0.10|0.07|A|F|1992-03-25|1992-04-03|1992-04-04|TAKE BACK RETURN|MAIL|ts-- blithely final accounts cajole. carefu|
+5601|73|4|3|38|36976.66|0.07|0.00|A|F|1992-01-08|1992-03-01|1992-01-09|TAKE BACK RETURN|REG AIR|ter the evenly final deposit|
+5601|148|5|4|12|12577.68|0.03|0.01|A|F|1992-02-27|1992-03-16|1992-03-27|COLLECT COD|TRUCK|ep carefully a|
+5602|176|4|1|9|9685.53|0.08|0.03|N|O|1997-10-14|1997-09-14|1997-11-11|COLLECT COD|FOB|lar foxes; quickly ironic ac|
+5602|62|7|2|31|29823.86|0.04|0.08|N|O|1997-09-04|1997-10-24|1997-09-07|NONE|TRUCK|rate fluffily regular platelets. blithel|
+5602|68|5|3|30|29041.80|0.04|0.00|N|O|1997-09-20|1997-10-25|1997-10-12|DELIVER IN PERSON|FOB|e slyly even packages. careful|
+5603|98|2|1|50|49904.50|0.03|0.02|A|F|1992-10-06|1992-08-20|1992-10-08|COLLECT COD|SHIP|final theodolites accor|
+5603|116|6|2|49|49789.39|0.06|0.05|A|F|1992-06-24|1992-07-28|1992-07-01|DELIVER IN PERSON|FOB|fully silent requests. carefully fin|
+5603|32|8|3|49|45669.47|0.00|0.02|R|F|1992-10-07|1992-07-21|1992-10-10|DELIVER IN PERSON|TRUCK|nic, pending dependencies print|
+5604|136|7|1|44|45589.72|0.05|0.01|N|O|1998-08-06|1998-07-08|1998-09-04|NONE|RAIL|efully ironi|
+5604|136|2|2|49|50770.37|0.10|0.00|N|O|1998-05-02|1998-07-07|1998-05-20|NONE|FOB|ove the regula|
+5604|78|8|3|10|9780.70|0.07|0.05|N|O|1998-08-03|1998-06-23|1998-08-04|COLLECT COD|SHIP|ly final realms wake blit|
+5605|87|8|1|50|49354.00|0.08|0.05|N|O|1996-08-26|1996-10-15|1996-09-04|TAKE BACK RETURN|RAIL|instructions sleep carefully ironic req|
+5605|151|2|2|7|7358.05|0.06|0.01|N|O|1996-12-13|1996-10-13|1996-12-15|TAKE BACK RETURN|FOB|lowly special courts nag among the furi|
+5605|173|2|3|3|3219.51|0.01|0.02|N|O|1996-09-01|1996-10-02|1996-09-20|TAKE BACK RETURN|AIR|posits. accounts boost. t|
+5605|55|3|4|45|42977.25|0.00|0.01|N|O|1996-09-05|1996-10-04|1996-09-13|COLLECT COD|FOB|ly unusual instructions. carefully ironic p|
+5605|70|7|5|39|37832.73|0.00|0.08|N|O|1996-12-13|1996-11-03|1996-12-24|DELIVER IN PERSON|REG AIR|cial deposits. theodolites w|
+5605|166|7|6|29|30918.64|0.08|0.08|N|O|1996-09-19|1996-10-22|1996-10-06|DELIVER IN PERSON|SHIP| quickly. quickly pending sen|
+5606|174|5|1|47|50485.99|0.10|0.04|N|O|1996-12-23|1997-01-31|1997-01-20|DELIVER IN PERSON|REG AIR|carefully final foxes. pending, final|
+5606|92|3|2|34|33731.06|0.09|0.06|N|O|1997-02-23|1997-02-08|1997-03-09|TAKE BACK RETURN|REG AIR|uses. slyly final |
+5606|127|8|3|46|47247.52|0.04|0.00|N|O|1997-03-11|1997-01-13|1997-03-23|DELIVER IN PERSON|REG AIR|ter the ironic accounts. even, ironic depos|
+5606|82|3|4|30|29462.40|0.08|0.04|N|O|1997-02-06|1997-01-26|1997-02-16|DELIVER IN PERSON|REG AIR| nag always. blithely express packages |
+5606|7|2|5|25|22675.00|0.06|0.00|N|O|1996-12-25|1997-01-12|1997-01-11|TAKE BACK RETURN|AIR|breach about the furiously bold |
+5606|154|5|6|3|3162.45|0.04|0.06|N|O|1997-01-11|1997-01-04|1997-02-08|COLLECT COD|AIR| sauternes. asympto|
+5606|74|5|7|46|44807.22|0.07|0.01|N|O|1997-02-01|1997-01-31|1997-02-15|DELIVER IN PERSON|TRUCK|ow requests wake around the regular accoun|
+5607|132|8|1|23|23738.99|0.02|0.06|R|F|1992-04-17|1992-02-12|1992-04-30|DELIVER IN PERSON|MAIL|the special, final patterns |
+5632|10|3|1|48|43680.48|0.06|0.06|N|O|1996-05-08|1996-03-24|1996-06-04|TAKE BACK RETURN|FOB|unts. decoys u|
+5632|106|7|2|21|21128.10|0.02|0.08|N|O|1996-03-22|1996-03-10|1996-04-10|NONE|AIR|refully regular pinto beans. ironic reques|
+5632|67|2|3|24|23209.44|0.04|0.06|N|O|1996-03-23|1996-04-02|1996-03-30|TAKE BACK RETURN|MAIL|beans detect. quickly final i|
+5633|160|2|1|28|29684.48|0.02|0.00|N|O|1998-08-14|1998-07-24|1998-08-26|TAKE BACK RETURN|SHIP|as boost quickly. unusual pinto |
+5633|102|3|2|10|10021.00|0.09|0.04|N|O|1998-07-15|1998-08-03|1998-08-03|COLLECT COD|AIR|its cajole fluffily fluffily special pinto|
+5633|46|7|3|27|25543.08|0.03|0.02|N|O|1998-09-28|1998-07-28|1998-10-12|DELIVER IN PERSON|AIR|ructions. even ideas haggle carefully r|
+5633|164|5|4|50|53208.00|0.02|0.05|N|O|1998-07-23|1998-07-09|1998-08-21|DELIVER IN PERSON|TRUCK|ts. slyly regular |
+5633|100|2|5|48|48004.80|0.01|0.05|N|O|1998-06-24|1998-07-22|1998-07-18|DELIVER IN PERSON|TRUCK|even courts haggle slyly at the requ|
+5633|107|2|6|1|1007.10|0.02|0.03|N|O|1998-09-29|1998-08-28|1998-10-19|NONE|RAIL|thely notornis: |
+5633|11|5|7|39|35529.39|0.02|0.08|N|O|1998-07-12|1998-07-03|1998-07-13|COLLECT COD|TRUCK|ding ideas cajole furiously after|
+5634|185|6|1|26|28214.68|0.10|0.08|N|O|1996-10-29|1996-09-15|1996-11-24|COLLECT COD|REG AIR|ptotes mold qu|
+5634|175|3|2|22|23653.74|0.02|0.05|N|O|1996-09-01|1996-08-31|1996-09-05|DELIVER IN PERSON|MAIL|silently unusual foxes above the blithely|
+5634|109|6|3|16|16145.60|0.08|0.02|N|O|1996-11-15|1996-09-14|1996-12-04|NONE|AIR|ess ideas are carefully pending, even re|
+5634|182|3|4|29|31383.22|0.00|0.01|N|O|1996-08-10|1996-10-29|1996-08-11|TAKE BACK RETURN|MAIL|ely final ideas. deposits sleep. reg|
+5634|1|2|5|1|901.00|0.04|0.02|N|O|1996-10-02|1996-10-21|1996-10-27|COLLECT COD|MAIL|ctions haggle carefully. carefully clo|
+5635|83|4|1|43|42272.44|0.03|0.00|R|F|1992-10-12|1992-09-29|1992-11-01|TAKE BACK RETURN|TRUCK|cross the d|
+5635|72|3|2|5|4860.35|0.05|0.08|R|F|1992-10-02|1992-11-05|1992-10-26|TAKE BACK RETURN|REG AIR|yly along the ironic, fi|
+5635|72|1|3|12|11664.84|0.09|0.02|A|F|1992-10-18|1992-09-24|1992-11-17|NONE|REG AIR|ke slyly against the carefully final req|
+5635|8|5|4|40|36320.00|0.03|0.01|A|F|1992-09-25|1992-11-05|1992-10-11|NONE|FOB|pending foxes. regular packages|
+5635|169|10|5|38|40628.08|0.05|0.06|A|F|1992-10-09|1992-09-25|1992-10-18|NONE|MAIL|ckly pendin|
+5635|162|9|6|23|24429.68|0.05|0.04|A|F|1992-08-24|1992-11-10|1992-09-21|NONE|AIR|ily pending packages. bold,|
+5635|137|3|7|32|33188.16|0.03|0.08|R|F|1992-11-24|1992-09-20|1992-12-17|TAKE BACK RETURN|TRUCK|slyly even|
+5636|70|9|1|18|17461.26|0.05|0.03|R|F|1995-05-14|1995-05-17|1995-06-12|DELIVER IN PERSON|REG AIR|slyly express requests. furiously pen|
+5636|70|5|2|26|25221.82|0.03|0.06|A|F|1995-03-05|1995-05-16|1995-03-23|TAKE BACK RETURN|AIR| furiously final pinto beans o|
+5636|90|1|3|21|20791.89|0.03|0.03|A|F|1995-03-13|1995-05-11|1995-03-24|COLLECT COD|AIR| are furiously unusual |
+5636|109|6|4|15|15136.50|0.03|0.04|R|F|1995-04-21|1995-04-30|1995-05-05|DELIVER IN PERSON|REG AIR|efully special|
+5636|47|4|5|13|12311.52|0.10|0.03|A|F|1995-05-11|1995-04-27|1995-05-26|COLLECT COD|AIR|en, fluffy accounts amon|
+5636|12|3|6|33|30096.33|0.06|0.04|A|F|1995-03-09|1995-04-05|1995-03-23|DELIVER IN PERSON|MAIL|ding to the |
+5636|134|10|7|24|24819.12|0.10|0.05|R|F|1995-04-12|1995-03-27|1995-04-16|DELIVER IN PERSON|RAIL|counts sleep furiously b|
+5637|47|4|1|14|13258.56|0.03|0.05|N|O|1996-07-20|1996-07-26|1996-08-14|COLLECT COD|MAIL|y bold deposits wak|
+5637|172|3|2|35|37525.95|0.09|0.08|N|O|1996-08-01|1996-08-04|1996-08-20|NONE|AIR|s sleep blithely alongside of the ironic|
+5637|96|10|3|22|21913.98|0.01|0.07|N|O|1996-08-28|1996-07-30|1996-09-17|COLLECT COD|REG AIR|nding requests are ca|
+5637|66|1|4|16|15456.96|0.03|0.03|N|O|1996-09-08|1996-08-31|1996-09-29|TAKE BACK RETURN|TRUCK|d packages. express requests|
+5637|196|7|5|10|10961.90|0.01|0.00|N|O|1996-08-25|1996-08-11|1996-09-23|TAKE BACK RETURN|MAIL|ickly ironic gifts. blithely even cour|
+5637|129|4|6|27|27786.24|0.01|0.05|N|O|1996-06-27|1996-08-09|1996-07-27|DELIVER IN PERSON|REG AIR|oss the carefully express warhorses|
+5638|138|9|1|45|46715.85|0.09|0.07|A|F|1994-05-17|1994-03-09|1994-06-15|NONE|TRUCK|ar foxes. fluffily pending accounts |
+5638|168|3|2|12|12817.92|0.02|0.05|A|F|1994-02-05|1994-04-01|1994-02-25|COLLECT COD|TRUCK|n, even requests. furiously ironic not|
+5638|162|9|3|21|22305.36|0.08|0.00|A|F|1994-03-13|1994-03-27|1994-03-17|DELIVER IN PERSON|TRUCK|press courts use f|
+5639|47|10|1|11|10417.44|0.09|0.02|R|F|1994-09-18|1994-07-10|1994-10-12|TAKE BACK RETURN|SHIP|g the unusual pinto beans caj|
+5664|122|1|1|25|25553.00|0.00|0.06|N|O|1998-10-29|1998-09-23|1998-11-25|COLLECT COD|FOB|eposits: furiously ironic grouch|
+5664|173|2|2|9|9658.53|0.07|0.05|N|O|1998-07-31|1998-08-26|1998-08-12|COLLECT COD|RAIL| ironic deposits haggle furiously. re|
+5664|53|4|3|31|29544.55|0.01|0.03|N|O|1998-11-10|1998-09-12|1998-12-07|TAKE BACK RETURN|FOB|ainst the never silent request|
+5664|138|9|4|33|34258.29|0.08|0.03|N|O|1998-08-29|1998-09-17|1998-09-25|DELIVER IN PERSON|RAIL|d the final |
+5664|112|2|5|44|44532.84|0.01|0.06|N|O|1998-09-24|1998-09-26|1998-10-23|NONE|TRUCK|ang thinly bold pa|
+5664|68|5|6|34|32914.04|0.09|0.01|N|O|1998-09-10|1998-10-05|1998-09-15|COLLECT COD|RAIL|st. fluffily pending foxes na|
+5664|182|3|7|9|9739.62|0.01|0.05|N|O|1998-11-04|1998-10-15|1998-11-20|TAKE BACK RETURN|REG AIR|yly. express ideas agai|
+5665|101|2|1|32|32035.20|0.00|0.02|A|F|1993-08-11|1993-08-01|1993-09-07|NONE|AIR|f the slyly even requests! regular request|
+5665|5|8|2|14|12670.00|0.02|0.00|R|F|1993-06-29|1993-09-16|1993-07-16|DELIVER IN PERSON|AIR|- special pinto beans sleep quickly blithel|
+5665|158|9|3|41|43384.15|0.09|0.02|A|F|1993-08-23|1993-09-22|1993-09-11|COLLECT COD|REG AIR| idle ideas across |
+5665|46|9|4|47|44463.88|0.01|0.01|A|F|1993-10-06|1993-09-19|1993-11-01|NONE|RAIL|s mold fluffily. final deposits along the|
+5666|122|5|1|7|7154.84|0.09|0.08|R|F|1994-05-10|1994-04-06|1994-05-21|NONE|FOB| ideas. regular packag|
+5666|36|7|2|14|13104.42|0.08|0.01|A|F|1994-02-27|1994-04-11|1994-03-06|DELIVER IN PERSON|TRUCK|lar deposits nag against the slyly final d|
+5666|193|6|3|39|42634.41|0.00|0.01|A|F|1994-05-13|1994-04-02|1994-06-12|DELIVER IN PERSON|TRUCK|the even, final foxes. quickly iron|
+5666|131|2|4|24|24747.12|0.07|0.01|R|F|1994-02-14|1994-03-09|1994-03-06|DELIVER IN PERSON|FOB|on the carefully pending asympto|
+5666|109|10|5|36|36327.60|0.07|0.07|R|F|1994-03-15|1994-03-16|1994-03-18|COLLECT COD|TRUCK|accounts. furiousl|
+5667|145|4|1|37|38670.18|0.09|0.06|N|O|1995-09-24|1995-09-17|1995-10-03|NONE|REG AIR|s cajole blit|
+5668|4|9|1|15|13560.00|0.03|0.04|A|F|1995-04-06|1995-05-12|1995-04-17|COLLECT COD|FOB| the express, pending requests. bo|
+5669|191|2|1|7|7638.33|0.06|0.06|N|O|1996-06-19|1996-07-07|1996-07-11|COLLECT COD|SHIP|yly regular requests lose blithely. careful|
+5669|156|8|2|2|2112.30|0.06|0.07|N|O|1996-08-04|1996-06-15|1996-08-20|NONE|SHIP| blithely excuses. slyly|
+5669|158|9|3|40|42326.00|0.00|0.02|N|O|1996-08-30|1996-06-15|1996-09-07|TAKE BACK RETURN|FOB|ar accounts alongside of the final, p|
+5669|90|1|4|31|30692.79|0.04|0.05|N|O|1996-08-05|1996-06-10|1996-08-29|COLLECT COD|AIR|to beans against the regular depo|
+5669|140|6|5|30|31204.20|0.07|0.01|N|O|1996-07-14|1996-07-28|1996-08-10|TAKE BACK RETURN|TRUCK|l accounts. care|
+5670|90|1|1|27|26732.43|0.10|0.06|R|F|1993-05-09|1993-05-30|1993-06-06|TAKE BACK RETURN|REG AIR| ideas promise bli|
+5670|186|7|2|43|46705.74|0.06|0.00|A|F|1993-07-09|1993-06-03|1993-07-14|DELIVER IN PERSON|FOB|ests in place of the carefully sly depos|
+5670|7|8|3|24|21768.00|0.09|0.04|A|F|1993-07-17|1993-07-01|1993-08-03|NONE|AIR|press, express requests haggle|
+5670|142|9|4|11|11463.54|0.06|0.06|R|F|1993-07-11|1993-06-26|1993-07-24|DELIVER IN PERSON|MAIL|etect furiously among the even pin|
+5671|120|7|1|25|25503.00|0.00|0.08|N|O|1998-04-17|1998-03-28|1998-05-06|DELIVER IN PERSON|AIR|cording to the quickly final requests-- |
+5671|129|8|2|46|47339.52|0.05|0.08|N|O|1998-03-28|1998-04-22|1998-04-19|TAKE BACK RETURN|MAIL|lar pinto beans detect care|
+5671|172|10|3|13|13938.21|0.10|0.06|N|O|1998-03-02|1998-04-03|1998-03-08|TAKE BACK RETURN|TRUCK|bold theodolites about|
+5671|111|1|4|42|42466.62|0.00|0.07|N|O|1998-02-17|1998-04-24|1998-03-17|TAKE BACK RETURN|SHIP|carefully slyly special deposit|
+5671|129|4|5|13|13378.56|0.09|0.00|N|O|1998-04-24|1998-03-26|1998-04-27|NONE|REG AIR|ers according to the ironic, unusual excu|
+5671|114|1|6|30|30423.30|0.09|0.07|N|O|1998-06-06|1998-04-15|1998-07-01|DELIVER IN PERSON|TRUCK|fily ironi|
+5696|137|3|1|28|29039.64|0.03|0.06|N|O|1995-07-03|1995-06-14|1995-07-27|COLLECT COD|REG AIR| the fluffily brave pearls |
+5696|59|1|2|46|44116.30|0.01|0.00|N|O|1995-08-10|1995-07-08|1995-08-25|COLLECT COD|AIR|ter the instruct|
+5696|167|2|3|42|44820.72|0.04|0.01|N|F|1995-06-06|1995-06-11|1995-06-19|TAKE BACK RETURN|SHIP|te furious|
+5696|98|10|4|20|19961.80|0.08|0.00|N|O|1995-06-25|1995-07-18|1995-07-16|NONE|TRUCK|silent, pending ideas sleep fluffil|
+5696|124|9|5|19|19458.28|0.07|0.05|N|O|1995-08-31|1995-06-13|1995-09-10|COLLECT COD|SHIP|unusual requests sleep furiously ru|
+5696|132|8|6|37|38188.81|0.04|0.05|N|O|1995-07-21|1995-06-23|1995-08-19|NONE|RAIL| carefully expres|
+5696|102|9|7|6|6012.60|0.07|0.05|N|O|1995-08-03|1995-07-15|1995-09-01|DELIVER IN PERSON|REG AIR|n patterns lose slyly fina|
+5697|55|7|1|24|22921.20|0.10|0.07|R|F|1992-10-27|1992-11-28|1992-11-20|NONE|RAIL|uffily iro|
+5697|16|10|2|43|39388.43|0.06|0.02|R|F|1992-12-08|1992-12-03|1992-12-17|TAKE BACK RETURN|FOB|blithely reg|
+5697|56|8|3|42|40154.10|0.03|0.01|A|F|1992-12-19|1992-12-08|1993-01-03|COLLECT COD|TRUCK|inal theodolites cajole after the bli|
+5698|11|8|1|30|27330.30|0.01|0.05|A|F|1994-05-26|1994-08-16|1994-06-19|COLLECT COD|AIR|its. quickly regular foxes aro|
+5698|163|4|2|25|26579.00|0.08|0.07|R|F|1994-08-06|1994-06-21|1994-08-25|NONE|SHIP| asymptotes sleep slyly above the|
+5698|155|3|3|45|47481.75|0.03|0.01|A|F|1994-06-23|1994-08-13|1994-07-02|NONE|FOB|ng excuses. slyly express asymptotes|
+5698|58|6|4|15|14370.75|0.07|0.08|R|F|1994-06-29|1994-07-03|1994-07-02|COLLECT COD|REG AIR|ly ironic frets haggle carefully |
+5698|140|1|5|37|38485.18|0.06|0.06|A|F|1994-06-30|1994-06-23|1994-07-22|TAKE BACK RETURN|SHIP|ts. even, ironic |
+5698|188|9|6|1|1088.18|0.06|0.04|R|F|1994-05-31|1994-07-10|1994-06-03|DELIVER IN PERSON|MAIL|nts. slyly quiet pinto beans nag carefu|
+5699|2|7|1|24|21648.00|0.01|0.07|A|F|1992-10-21|1992-09-04|1992-11-04|COLLECT COD|AIR|kages. fin|
+5699|55|10|2|26|24831.30|0.06|0.06|R|F|1992-08-11|1992-09-21|1992-08-14|COLLECT COD|MAIL|y final deposits wake fluffily u|
+5699|18|2|3|48|44064.48|0.10|0.05|R|F|1992-11-23|1992-10-20|1992-11-29|DELIVER IN PERSON|TRUCK|s. carefully regul|
+5699|55|3|4|46|43932.30|0.08|0.02|A|F|1992-11-28|1992-09-23|1992-12-27|TAKE BACK RETURN|FOB|o the slyly|
+5699|28|7|5|21|19488.42|0.02|0.02|A|F|1992-10-13|1992-09-30|1992-10-19|NONE|MAIL|lyly final pla|
+5699|191|5|6|30|32735.70|0.08|0.05|R|F|1992-11-13|1992-10-01|1992-12-11|DELIVER IN PERSON|AIR| the carefully final |
+5699|129|8|7|45|46310.40|0.09|0.06|A|F|1992-09-23|1992-10-22|1992-10-04|DELIVER IN PERSON|SHIP|rmanent packages sleep across the f|
+5700|168|5|1|24|25635.84|0.09|0.00|N|O|1997-12-26|1998-01-28|1998-01-18|DELIVER IN PERSON|REG AIR|ix carefully |
+5700|123|8|2|30|30693.60|0.00|0.06|N|O|1998-04-19|1998-03-13|1998-04-27|COLLECT COD|MAIL|ly blithely final instructions. fl|
+5700|126|5|3|23|23600.76|0.03|0.05|N|O|1998-01-30|1998-01-31|1998-01-31|NONE|REG AIR| wake quickly carefully fluffy hockey|
+5701|54|2|1|17|16218.85|0.02|0.05|N|O|1997-03-27|1997-04-08|1997-04-21|DELIVER IN PERSON|RAIL|tes. quickly final a|
+5702|77|7|1|44|42991.08|0.06|0.02|R|F|1994-01-04|1993-11-25|1994-01-22|NONE|RAIL|lites. carefully final requests doze b|
+5702|86|7|2|37|36484.96|0.10|0.05|R|F|1993-12-14|1993-10-21|1994-01-08|NONE|FOB|ix slyly. regular instructions slee|
+5702|131|7|3|44|45369.72|0.00|0.02|R|F|1993-11-28|1993-12-02|1993-12-22|NONE|TRUCK|ake according to th|
+5702|63|8|4|31|29854.86|0.00|0.04|A|F|1994-01-04|1993-10-22|1994-01-26|DELIVER IN PERSON|TRUCK|pinto beans. blithely |
+5703|88|9|1|2|1976.16|0.09|0.01|R|F|1993-05-29|1993-07-26|1993-06-05|TAKE BACK RETURN|REG AIR|nts against the blithely sile|
+5728|44|1|1|47|44369.88|0.10|0.05|A|F|1994-12-13|1995-01-25|1994-12-25|TAKE BACK RETURN|MAIL|nd the bravely final deposits. final ideas|
+5728|159|1|2|40|42366.00|0.05|0.08|A|F|1995-03-28|1995-01-17|1995-04-14|TAKE BACK RETURN|SHIP|final deposits. theodolite|
+5729|143|4|1|5|5215.70|0.07|0.00|R|F|1994-11-27|1994-11-11|1994-12-23|TAKE BACK RETURN|MAIL|s. even sheaves nag courts. |
+5729|107|10|2|39|39276.90|0.10|0.00|A|F|1995-01-22|1994-11-21|1995-02-13|TAKE BACK RETURN|MAIL|. special pl|
+5729|12|3|3|50|45600.50|0.00|0.05|R|F|1994-12-09|1994-12-31|1994-12-24|TAKE BACK RETURN|AIR|ly special sentiments. car|
+5730|151|2|1|2|2102.30|0.08|0.00|N|O|1998-02-24|1998-03-15|1998-03-11|COLLECT COD|SHIP|ely ironic foxes. carefu|
+5730|200|1|2|9|9901.80|0.10|0.01|N|O|1998-03-05|1998-02-02|1998-03-28|DELIVER IN PERSON|MAIL|s lose blithely. specia|
+5731|192|6|1|13|14198.47|0.02|0.04|N|O|1997-07-30|1997-06-23|1997-08-13|COLLECT COD|RAIL|ngside of the quickly regular depos|
+5731|105|6|2|11|11056.10|0.00|0.08|N|O|1997-06-06|1997-07-08|1997-06-25|NONE|MAIL| furiously final accounts wake. d|
+5731|111|2|3|6|6066.66|0.01|0.04|N|O|1997-07-02|1997-07-01|1997-07-08|COLLECT COD|SHIP|sits integrate slyly close platelets. quick|
+5731|14|1|4|6|5484.06|0.03|0.06|N|O|1997-09-07|1997-06-20|1997-09-20|TAKE BACK RETURN|RAIL|rs. quickly regular theo|
+5731|195|6|5|19|20808.61|0.08|0.02|N|O|1997-06-29|1997-06-27|1997-07-15|NONE|REG AIR|ly unusual ideas above the |
+5732|139|5|1|26|27017.38|0.02|0.07|N|O|1997-08-18|1997-10-25|1997-09-12|TAKE BACK RETURN|TRUCK|totes cajole according to the theodolites.|
+5733|33|4|1|39|36388.17|0.01|0.07|A|F|1993-03-22|1993-05-24|1993-04-04|DELIVER IN PERSON|FOB|side of the|
+5734|183|4|1|29|31412.22|0.05|0.01|N|O|1997-12-01|1997-12-08|1997-12-23|NONE|RAIL|structions cajole final, express |
+5734|150|3|2|6|6300.90|0.07|0.00|N|O|1997-10-27|1997-12-19|1997-11-02|COLLECT COD|RAIL|s. regular platelets cajole furiously. regu|
+5734|67|8|3|10|9670.60|0.01|0.03|N|O|1997-12-28|1997-12-24|1998-01-24|DELIVER IN PERSON|TRUCK|equests; accounts above|
+5735|60|1|1|41|39362.46|0.01|0.01|R|F|1994-12-23|1995-02-10|1995-01-22|COLLECT COD|MAIL|lthily ruthless i|
+5760|1|8|1|6|5406.00|0.09|0.03|R|F|1994-07-30|1994-07-31|1994-08-16|COLLECT COD|REG AIR|ng the acco|
+5760|6|1|2|24|21744.00|0.04|0.05|A|F|1994-07-15|1994-07-04|1994-08-08|NONE|MAIL|s. bravely ironic accounts among|
+5760|148|5|3|8|8385.12|0.07|0.04|A|F|1994-09-06|1994-08-03|1994-10-06|NONE|AIR|l accounts among the carefully even de|
+5760|123|4|4|19|19439.28|0.10|0.01|R|F|1994-08-02|1994-08-02|1994-08-15|COLLECT COD|SHIP|sits nag. even, regular ideas cajole b|
+5760|166|1|5|6|6396.96|0.03|0.07|R|F|1994-06-09|1994-07-06|1994-06-16|DELIVER IN PERSON|MAIL| shall have to cajole along the |
+5761|47|6|1|41|38828.64|0.08|0.00|N|O|1998-07-31|1998-08-09|1998-08-08|TAKE BACK RETURN|TRUCK|pecial deposits. qu|
+5761|108|9|2|36|36291.60|0.00|0.07|N|O|1998-09-07|1998-09-21|1998-09-11|TAKE BACK RETURN|TRUCK| pinto beans thrash alongside of the pendi|
+5761|198|2|3|49|53811.31|0.04|0.08|N|O|1998-07-14|1998-08-20|1998-07-25|NONE|SHIP|ly bold accounts wake above the|
+5762|175|6|1|6|6451.02|0.05|0.02|N|O|1997-04-07|1997-03-25|1997-05-02|NONE|AIR|ironic dependencies doze carefu|
+5762|102|9|2|27|27056.70|0.02|0.08|N|O|1997-02-21|1997-05-08|1997-03-23|NONE|REG AIR|across the bold ideas. carefully sp|
+5762|89|10|3|40|39563.20|0.00|0.08|N|O|1997-04-30|1997-05-09|1997-05-08|COLLECT COD|SHIP|al instructions. furiousl|
+5762|133|4|4|47|48557.11|0.05|0.06|N|O|1997-03-02|1997-03-23|1997-03-19|NONE|RAIL|equests sleep after the furiously ironic pa|
+5762|25|6|5|28|25900.56|0.02|0.06|N|O|1997-02-22|1997-03-25|1997-02-24|TAKE BACK RETURN|SHIP|ic foxes among the blithely qui|
+5762|12|6|6|12|10944.12|0.00|0.06|N|O|1997-04-18|1997-04-27|1997-05-11|DELIVER IN PERSON|REG AIR|ages are abo|
+5763|131|2|1|32|32996.16|0.02|0.06|N|O|1998-07-16|1998-09-13|1998-08-02|DELIVER IN PERSON|FOB|ding instruct|
+5763|136|2|2|23|23830.99|0.09|0.04|N|O|1998-07-25|1998-09-21|1998-08-15|DELIVER IN PERSON|SHIP|re after the blithel|
+5763|13|3|3|25|22825.25|0.01|0.02|N|O|1998-10-04|1998-08-16|1998-10-09|DELIVER IN PERSON|REG AIR|inal theodolites. even re|
+5763|121|6|4|47|47992.64|0.09|0.00|N|O|1998-08-22|1998-09-22|1998-09-04|NONE|REG AIR|gle slyly. slyly final re|
+5763|123|4|5|8|8184.96|0.06|0.05|N|O|1998-09-23|1998-09-15|1998-09-27|DELIVER IN PERSON|TRUCK|foxes wake slyly. car|
+5763|190|1|6|9|9811.71|0.08|0.02|N|O|1998-09-24|1998-09-01|1998-10-02|NONE|AIR| deposits. instru|
+5764|101|2|1|28|28030.80|0.04|0.04|A|F|1993-12-07|1993-12-20|1993-12-26|TAKE BACK RETURN|RAIL|sleep furi|
+5764|200|3|2|20|22004.00|0.10|0.05|A|F|1993-10-17|1993-12-24|1993-10-18|TAKE BACK RETURN|FOB|ng to the fluffily qu|
+5764|188|9|3|4|4352.72|0.03|0.05|A|F|1993-10-25|1993-12-23|1993-11-06|DELIVER IN PERSON|AIR|ily regular courts haggle|
+5765|162|7|1|31|32926.96|0.00|0.06|A|F|1995-01-11|1995-02-13|1995-01-23|TAKE BACK RETURN|AIR|r foxes. ev|
+5765|124|9|2|29|29699.48|0.07|0.08|A|F|1994-12-29|1995-02-01|1995-01-26|NONE|RAIL|nic requests. deposits wake quickly among |
+5765|139|10|3|31|32213.03|0.05|0.01|R|F|1995-03-01|1995-01-23|1995-03-31|TAKE BACK RETURN|REG AIR|the furiou|
+5765|152|4|4|46|48398.90|0.07|0.07|R|F|1995-03-13|1995-02-12|1995-03-20|DELIVER IN PERSON|MAIL|ccounts sleep about th|
+5765|174|3|5|48|51560.16|0.09|0.02|A|F|1995-03-30|1995-01-14|1995-04-09|DELIVER IN PERSON|SHIP|theodolites integrate furiously|
+5765|83|4|6|41|40306.28|0.04|0.00|A|F|1994-12-31|1995-02-11|1995-01-17|TAKE BACK RETURN|SHIP| furiously. slyly sile|
+5765|42|5|7|21|19782.84|0.05|0.04|R|F|1995-04-05|1995-02-12|1995-05-05|COLLECT COD|TRUCK|ole furiously. quick, special dependencies |
+5766|188|9|1|1|1088.18|0.10|0.01|R|F|1994-01-16|1993-11-16|1994-01-23|NONE|MAIL|blithely regular the|
+5766|149|8|2|39|40916.46|0.02|0.07|A|F|1993-10-24|1993-12-07|1993-11-08|DELIVER IN PERSON|SHIP| furiously unusual courts. slyly final pear|
+5766|118|8|3|4|4072.44|0.08|0.08|R|F|1993-11-10|1993-10-30|1993-12-01|COLLECT COD|TRUCK|ly even requests. furiou|
+5767|167|8|1|11|11738.76|0.08|0.01|A|F|1992-06-02|1992-05-30|1992-06-08|NONE|TRUCK|instructions. carefully final accou|
+5767|69|8|2|15|14535.90|0.07|0.05|R|F|1992-06-05|1992-07-28|1992-06-08|DELIVER IN PERSON|MAIL|warthogs. carefully unusual g|
+5767|191|3|3|42|45829.98|0.06|0.01|R|F|1992-07-31|1992-06-09|1992-08-09|COLLECT COD|TRUCK| blithe deposi|
+5767|153|4|4|34|35807.10|0.06|0.01|R|F|1992-06-02|1992-06-23|1992-06-17|NONE|FOB|sits among the|
+5767|46|7|5|36|34057.44|0.03|0.00|A|F|1992-07-17|1992-06-10|1992-07-19|COLLECT COD|AIR|ake carefully. packages |
+5792|178|8|1|34|36657.78|0.08|0.07|R|F|1993-05-23|1993-06-25|1993-06-12|NONE|RAIL|requests are against t|
+5792|157|5|2|47|49686.05|0.10|0.00|A|F|1993-06-08|1993-05-10|1993-06-26|COLLECT COD|AIR|regular, ironic excuses n|
+5792|183|4|3|32|34661.76|0.05|0.08|R|F|1993-06-26|1993-05-23|1993-07-07|COLLECT COD|RAIL|s are slyly against the ev|
+5792|14|8|4|14|12796.14|0.09|0.02|A|F|1993-07-28|1993-06-17|1993-08-27|DELIVER IN PERSON|RAIL|olites print carefully|
+5792|102|9|5|31|31065.10|0.02|0.01|A|F|1993-06-17|1993-05-05|1993-07-01|COLLECT COD|TRUCK|s? furiously even instructions |
+5793|53|5|1|20|19061.00|0.05|0.03|N|O|1997-10-05|1997-09-04|1997-10-30|COLLECT COD|AIR|e carefully ex|
+5793|170|5|2|41|43876.97|0.06|0.06|N|O|1997-08-04|1997-10-10|1997-08-12|DELIVER IN PERSON|TRUCK|snooze quick|
+5793|43|4|3|8|7544.32|0.07|0.03|N|O|1997-08-16|1997-09-08|1997-08-28|COLLECT COD|AIR|al foxes l|
+5793|148|7|4|48|50310.72|0.02|0.02|N|O|1997-09-27|1997-08-23|1997-10-27|DELIVER IN PERSON|REG AIR|quickly enticing excuses use slyly abov|
+5794|158|9|1|42|44442.30|0.06|0.05|R|F|1993-06-29|1993-05-30|1993-07-28|COLLECT COD|REG AIR|he careful|
+5794|115|2|2|14|14211.54|0.09|0.02|R|F|1993-04-19|1993-07-02|1993-05-18|COLLECT COD|SHIP|uriously carefully ironic reque|
+5794|7|8|3|15|13605.00|0.09|0.06|R|F|1993-06-25|1993-06-27|1993-07-09|NONE|MAIL|blithely regular ideas. final foxes haggle |
+5794|137|3|4|47|48745.11|0.00|0.08|A|F|1993-07-16|1993-06-21|1993-08-05|TAKE BACK RETURN|REG AIR|quests. blithely final excu|
+5795|193|6|1|34|37168.46|0.09|0.05|A|F|1992-08-21|1992-07-30|1992-08-27|COLLECT COD|REG AIR|al instructions must affix along the ironic|
+5796|58|3|1|27|25867.35|0.10|0.00|N|O|1996-04-06|1996-02-29|1996-04-20|DELIVER IN PERSON|RAIL|s wake quickly aro|
+5797|61|6|1|17|16338.02|0.09|0.03|N|O|1997-12-13|1998-01-12|1997-12-23|NONE|REG AIR|the ironic, even theodoli|
+5798|127|8|1|2|2054.24|0.09|0.00|N|O|1998-05-25|1998-06-22|1998-06-09|COLLECT COD|FOB|e furiously across |
+5798|124|9|2|14|14337.68|0.06|0.05|N|O|1998-04-01|1998-06-14|1998-04-27|NONE|RAIL|he special, bold packages. carefully iron|
+5798|134|5|3|22|22750.86|0.02|0.01|N|O|1998-06-24|1998-06-06|1998-07-20|COLLECT COD|TRUCK|sits poach carefully|
+5798|146|3|4|40|41845.60|0.08|0.06|N|O|1998-07-09|1998-06-24|1998-07-16|NONE|TRUCK| integrate carefu|
+5798|149|8|5|7|7343.98|0.06|0.07|N|O|1998-06-06|1998-05-10|1998-06-07|NONE|SHIP|ts against the blithely final p|
+5798|38|4|6|9|8442.27|0.06|0.02|N|O|1998-05-05|1998-05-25|1998-05-09|DELIVER IN PERSON|REG AIR|e blithely|
+5798|115|9|7|32|32483.52|0.08|0.01|N|O|1998-04-27|1998-05-03|1998-05-08|TAKE BACK RETURN|REG AIR|ubt blithely above the |
+5799|95|6|1|41|40798.69|0.04|0.02|N|O|1995-11-13|1995-10-31|1995-11-16|COLLECT COD|TRUCK|al accounts sleep ruthlessl|
+5799|100|3|2|30|30003.00|0.03|0.08|N|O|1995-09-12|1995-09-13|1995-09-19|NONE|RAIL| furiously s|
+5824|77|7|1|40|39082.80|0.06|0.06|N|O|1997-01-14|1997-01-17|1997-02-02|NONE|REG AIR|he final packag|
+5824|182|3|2|42|45451.56|0.09|0.00|N|O|1997-02-01|1997-02-20|1997-02-07|COLLECT COD|SHIP|ts sleep. carefully regular accounts h|
+5824|73|1|3|16|15569.12|0.03|0.02|N|O|1997-02-13|1997-01-07|1997-02-17|TAKE BACK RETURN|TRUCK|sly express Ti|
+5824|92|5|4|32|31746.88|0.03|0.02|N|O|1997-02-16|1997-01-24|1997-02-20|DELIVER IN PERSON|RAIL|ven requests. |
+5824|107|8|5|44|44312.40|0.08|0.03|N|O|1997-01-24|1997-01-31|1997-02-11|COLLECT COD|TRUCK|fily fluffily bold|
+5825|159|7|1|23|24360.45|0.10|0.05|R|F|1995-05-10|1995-04-28|1995-05-13|DELIVER IN PERSON|TRUCK| special pinto beans. dependencies haggl|
+5826|144|1|1|4|4176.56|0.03|0.06|N|O|1998-07-31|1998-09-10|1998-08-27|NONE|AIR| packages across the fluffily spec|
+5826|64|5|2|18|17353.08|0.04|0.01|N|O|1998-07-17|1998-09-03|1998-07-22|NONE|SHIP|atelets use above t|
+5827|187|8|1|30|32615.40|0.03|0.05|N|O|1998-11-11|1998-09-27|1998-11-30|DELIVER IN PERSON|RAIL|ounts may c|
+5827|103|6|2|23|23071.30|0.09|0.05|N|O|1998-11-16|1998-09-14|1998-11-17|COLLECT COD|RAIL|ans. furiously special instruct|
+5827|164|1|3|3|3192.48|0.03|0.06|N|O|1998-10-17|1998-09-29|1998-10-28|DELIVER IN PERSON|MAIL|uses eat along the furiously|
+5827|200|1|4|26|28605.20|0.06|0.00|N|O|1998-07-29|1998-09-24|1998-07-30|DELIVER IN PERSON|SHIP|arefully special packages wake thin|
+5827|112|9|5|38|38460.18|0.03|0.06|N|O|1998-10-18|1998-08-27|1998-10-23|TAKE BACK RETURN|TRUCK|ly ruthless accounts|
+5827|17|4|6|14|12838.14|0.05|0.01|N|O|1998-08-31|1998-09-06|1998-09-13|TAKE BACK RETURN|RAIL|rges. fluffily pending |
+5828|2|9|1|28|25256.00|0.10|0.03|A|F|1994-05-15|1994-05-20|1994-06-08|DELIVER IN PERSON|MAIL| special ideas haggle slyly ac|
+5828|158|3|2|37|39151.55|0.01|0.00|R|F|1994-06-07|1994-05-30|1994-06-17|NONE|RAIL|e carefully spec|
+5829|40|1|1|4|3760.16|0.01|0.02|N|O|1997-03-01|1997-02-17|1997-03-22|NONE|TRUCK|ithely; accounts cajole ideas. regular foxe|
+5829|107|10|2|40|40284.00|0.04|0.01|N|O|1997-04-21|1997-02-12|1997-05-04|COLLECT COD|TRUCK| the carefully ironic accounts. a|
+5829|129|8|3|6|6174.72|0.05|0.06|N|O|1997-01-22|1997-03-12|1997-02-02|TAKE BACK RETURN|AIR|sts. slyly special fo|
+5829|90|1|4|42|41583.78|0.02|0.07|N|O|1997-03-26|1997-04-01|1997-03-30|COLLECT COD|REG AIR|pearls. slyly bold deposits solve final|
+5829|191|5|5|49|53468.31|0.05|0.01|N|O|1997-01-31|1997-03-13|1997-02-18|NONE|MAIL| ironic excuses use fluf|
+5829|18|5|6|17|15606.17|0.09|0.02|N|O|1997-04-10|1997-03-29|1997-04-22|COLLECT COD|AIR|after the furiously ironic ideas no|
+5829|78|9|7|27|26407.89|0.08|0.04|N|O|1997-02-25|1997-03-31|1997-03-03|DELIVER IN PERSON|AIR|ns about the excuses are c|
+5830|160|2|1|29|30744.64|0.10|0.02|R|F|1993-06-19|1993-05-10|1993-07-13|DELIVER IN PERSON|REG AIR|y bold excuses|
+5831|191|2|1|2|2182.38|0.10|0.01|N|O|1997-02-09|1997-01-20|1997-03-07|TAKE BACK RETURN|TRUCK|quickly silent req|
+5831|74|3|2|33|32144.31|0.04|0.03|N|O|1996-11-20|1997-01-18|1996-12-18|TAKE BACK RETURN|MAIL| instructions wake. slyly sil|
+5831|82|3|3|6|5892.48|0.05|0.07|N|O|1997-01-29|1997-01-14|1997-02-09|NONE|MAIL|ly ironic accounts nag pendin|
+5831|13|10|4|46|41998.46|0.06|0.02|N|O|1997-02-24|1997-01-18|1997-03-02|COLLECT COD|MAIL|ly final pa|
+5831|43|4|5|37|34892.48|0.05|0.01|N|O|1997-01-17|1997-02-08|1997-02-01|NONE|FOB|uriously even requests|
+5856|4|1|1|1|904.00|0.03|0.02|A|F|1994-12-29|1995-01-07|1995-01-10|TAKE BACK RETURN|MAIL|tly. special deposits wake blithely even|
+5856|35|6|2|35|32726.05|0.09|0.02|R|F|1994-11-24|1994-12-23|1994-11-30|COLLECT COD|AIR|excuses. finally ir|
+5856|153|4|3|39|41072.85|0.05|0.03|A|F|1995-01-18|1995-01-11|1995-01-19|DELIVER IN PERSON|TRUCK|uickly quickly fluffy in|
+5857|58|9|1|25|23951.25|0.03|0.02|N|O|1997-12-02|1997-12-17|1997-12-08|DELIVER IN PERSON|REG AIR|ding platelets. pending excu|
+5857|195|9|2|50|54759.50|0.06|0.07|N|O|1997-12-04|1997-12-16|1997-12-20|NONE|TRUCK|y regular d|
+5857|68|3|3|1|968.06|0.03|0.01|N|O|1998-02-01|1997-12-09|1998-02-20|TAKE BACK RETURN|SHIP|instructions detect final reques|
+5857|118|2|4|12|12217.32|0.03|0.08|N|O|1998-01-24|1997-12-27|1998-02-10|TAKE BACK RETURN|AIR|counts. express, final|
+5857|192|4|5|14|15290.66|0.07|0.07|N|O|1997-12-10|1998-01-06|1998-01-04|TAKE BACK RETURN|TRUCK|ffily pendin|
+5857|93|5|6|49|48661.41|0.00|0.04|N|O|1998-01-23|1997-12-12|1998-01-28|DELIVER IN PERSON|REG AIR|egular pinto beans|
+5858|121|4|1|20|20422.40|0.02|0.06|A|F|1992-07-23|1992-08-26|1992-07-24|COLLECT COD|SHIP|uffily unusual pinto beans sleep|
+5858|16|7|2|36|32976.36|0.00|0.05|A|F|1992-09-25|1992-08-16|1992-10-11|NONE|SHIP|osits wake quickly quickly sile|
+5858|148|5|3|7|7336.98|0.08|0.02|A|F|1992-10-07|1992-08-16|1992-10-15|TAKE BACK RETURN|REG AIR|. doggedly regular packages use pendin|
+5858|164|9|4|46|48951.36|0.07|0.06|R|F|1992-09-07|1992-10-06|1992-10-06|DELIVER IN PERSON|MAIL|posits withi|
+5858|161|8|5|18|19100.88|0.00|0.07|A|F|1992-11-05|1992-10-08|1992-12-03|NONE|TRUCK|al excuses. bold|
+5858|154|9|6|7|7379.05|0.04|0.00|A|F|1992-09-14|1992-10-01|1992-10-01|TAKE BACK RETURN|RAIL|dly pending ac|
+5858|11|5|7|50|45550.50|0.06|0.00|R|F|1992-07-20|1992-10-07|1992-07-25|NONE|TRUCK|r the ironic ex|
+5859|175|4|1|50|53758.50|0.07|0.01|N|O|1997-07-08|1997-06-20|1997-07-27|COLLECT COD|MAIL|ly regular deposits use. ironic|
+5859|9|6|2|17|15453.00|0.03|0.03|N|O|1997-05-15|1997-06-30|1997-05-26|DELIVER IN PERSON|AIR|ly ironic requests. quickly unusual pin|
+5859|46|3|3|33|31219.32|0.10|0.04|N|O|1997-07-08|1997-06-22|1997-07-18|TAKE BACK RETURN|TRUCK|eposits unwind furiously final pinto bea|
+5859|93|4|4|40|39723.60|0.09|0.02|N|O|1997-08-05|1997-06-17|1997-08-20|NONE|REG AIR|l dependenci|
+5859|153|8|5|35|36860.25|0.00|0.08|N|O|1997-05-28|1997-07-14|1997-06-15|COLLECT COD|TRUCK|egular acco|
+5859|44|5|6|9|8496.36|0.01|0.02|N|O|1997-06-15|1997-06-06|1997-06-20|NONE|RAIL|ges boost quickly. blithely r|
+5859|191|5|7|27|29462.13|0.05|0.08|N|O|1997-07-30|1997-07-08|1997-08-08|NONE|MAIL| across th|
+5860|51|3|1|10|9510.50|0.04|0.04|A|F|1992-03-11|1992-03-30|1992-03-31|NONE|MAIL|ual patterns try to eat carefully above|
+5861|191|5|1|32|34918.08|0.00|0.03|N|O|1997-05-27|1997-05-29|1997-05-28|TAKE BACK RETURN|MAIL|nt asymptotes. carefully express request|
+5861|86|7|2|6|5916.48|0.10|0.03|N|O|1997-07-28|1997-05-18|1997-08-24|TAKE BACK RETURN|TRUCK|olites. slyly|
+5862|113|7|1|4|4052.44|0.09|0.06|N|O|1997-06-04|1997-04-26|1997-06-19|NONE|TRUCK|yly silent deposit|
+5862|2|7|2|29|26158.00|0.03|0.05|N|O|1997-04-02|1997-04-16|1997-04-04|NONE|FOB|e fluffily. furiously|
+5863|161|10|1|45|47752.20|0.07|0.06|A|F|1993-12-19|1994-01-25|1994-01-05|NONE|REG AIR| deposits are ab|
+5863|160|8|2|21|22263.36|0.09|0.03|R|F|1994-01-13|1994-01-09|1994-01-28|DELIVER IN PERSON|FOB|atelets nag blithely furi|
+5888|62|7|1|46|44254.76|0.02|0.00|N|O|1996-11-18|1996-11-05|1996-12-08|TAKE BACK RETURN|FOB|yly final accounts hag|
+5888|112|3|2|24|24290.64|0.03|0.01|N|O|1996-11-07|1996-11-30|1996-11-20|COLLECT COD|SHIP|ing to the spe|
+5889|77|7|1|17|16610.19|0.09|0.02|N|O|1995-07-01|1995-08-12|1995-07-25|NONE|AIR|blithely pending packages. flu|
+5890|113|4|1|38|38498.18|0.01|0.08|A|F|1993-02-14|1992-12-09|1993-02-27|COLLECT COD|FOB| accounts. carefully final asymptotes|
+5891|85|6|1|22|21671.76|0.00|0.06|R|F|1993-01-01|1993-02-18|1993-01-14|DELIVER IN PERSON|TRUCK|iresias cajole deposits. special, ir|
+5891|186|7|2|9|9775.62|0.03|0.07|R|F|1993-01-20|1993-02-27|1993-02-10|COLLECT COD|REG AIR|cajole carefully |
+5891|30|9|3|10|9300.30|0.08|0.01|A|F|1993-04-14|1993-02-07|1993-04-15|DELIVER IN PERSON|RAIL|nding requests. b|
+5892|148|9|1|7|7336.98|0.02|0.03|N|O|1995-06-26|1995-07-18|1995-07-25|COLLECT COD|AIR|e furiously. quickly even deposits da|
+5892|150|9|2|37|38855.55|0.09|0.06|N|O|1995-08-12|1995-06-11|1995-09-05|NONE|REG AIR|maintain. bold, expre|
+5892|3|4|3|28|25284.00|0.03|0.06|N|O|1995-08-16|1995-07-06|1995-08-22|DELIVER IN PERSON|MAIL|ithely unusual accounts will have to integ|
+5892|75|6|4|23|22426.61|0.08|0.04|R|F|1995-05-18|1995-07-06|1995-05-29|COLLECT COD|MAIL| foxes nag slyly about the qui|
+5893|134|10|1|43|44467.59|0.05|0.02|R|F|1992-11-02|1992-09-27|1992-11-21|TAKE BACK RETURN|RAIL|s. regular courts above the carefully silen|
+5893|2|9|2|2|1804.00|0.10|0.04|R|F|1992-07-18|1992-09-10|1992-08-12|NONE|RAIL|ckages wake sly|
+5894|8|5|1|23|20884.00|0.04|0.08|A|F|1994-09-05|1994-10-27|1994-09-13|NONE|TRUCK| furiously even deposits haggle alw|
+5894|79|8|2|48|46995.36|0.04|0.08|A|F|1994-09-04|1994-11-03|1994-09-17|NONE|TRUCK| asymptotes among the blithely silent |
+5895|15|9|1|38|34770.38|0.05|0.08|N|O|1997-04-05|1997-03-06|1997-05-03|DELIVER IN PERSON|RAIL|ts are furiously. regular, final excuses |
+5895|122|3|2|47|48039.64|0.04|0.06|N|O|1997-04-27|1997-03-17|1997-05-07|DELIVER IN PERSON|AIR|r packages wake carefull|
+5895|84|5|3|49|48219.92|0.03|0.07|N|O|1997-03-15|1997-02-17|1997-04-04|NONE|TRUCK|permanent foxes. packages|
+5895|146|7|4|31|32430.34|0.03|0.01|N|O|1997-03-03|1997-03-30|1997-03-08|TAKE BACK RETURN|TRUCK| final deposits nod slyly careful|
+5895|200|1|5|20|22004.00|0.07|0.00|N|O|1997-04-30|1997-02-07|1997-05-08|DELIVER IN PERSON|AIR|gular deposits wake blithely carefully fin|
+5895|78|7|6|15|14671.05|0.08|0.08|N|O|1997-04-19|1997-03-09|1997-05-13|TAKE BACK RETURN|RAIL|silent package|
+5920|187|8|1|50|54359.00|0.06|0.00|A|F|1995-03-13|1995-01-03|1995-03-31|TAKE BACK RETURN|RAIL|across the carefully pending platelets|
+5920|58|9|2|24|22993.20|0.01|0.05|A|F|1994-12-28|1995-01-21|1994-12-31|DELIVER IN PERSON|FOB|fully regular dolphins. furiousl|
+5920|117|1|3|2|2034.22|0.08|0.07|A|F|1995-02-18|1995-01-13|1995-03-04|NONE|SHIP| evenly spe|
+5920|12|2|4|28|25536.28|0.06|0.02|R|F|1994-12-17|1995-02-13|1994-12-31|NONE|SHIP|le slyly slyly even deposits. f|
+5920|100|4|5|42|42004.20|0.09|0.08|A|F|1994-12-18|1995-01-07|1995-01-14|COLLECT COD|AIR|lar, ironic dependencies sno|
+5921|99|3|1|44|43959.96|0.07|0.01|R|F|1994-07-14|1994-06-30|1994-07-15|NONE|TRUCK|ain about the special|
+5921|146|9|2|25|26153.50|0.06|0.01|A|F|1994-05-19|1994-06-15|1994-06-17|COLLECT COD|TRUCK|nd the slyly regular deposits. quick|
+5921|68|5|3|17|16457.02|0.06|0.01|R|F|1994-05-20|1994-05-26|1994-05-23|NONE|FOB|final asymptotes. even packages boost |
+5921|28|7|4|26|24128.52|0.03|0.04|A|F|1994-05-03|1994-07-06|1994-05-06|NONE|AIR|hy dependenc|
+5921|143|10|5|41|42768.74|0.04|0.02|R|F|1994-04-13|1994-05-31|1994-04-26|DELIVER IN PERSON|AIR|nusual, regular theodol|
+5921|115|6|6|5|5075.55|0.02|0.00|R|F|1994-06-01|1994-05-07|1994-06-10|COLLECT COD|TRUCK|eas cajole across the final, fi|
+5922|196|10|1|9|9865.71|0.07|0.00|N|O|1996-12-04|1997-01-20|1996-12-08|DELIVER IN PERSON|RAIL|haggle slyly even packages. packages|
+5922|157|2|2|37|39114.55|0.01|0.04|N|O|1996-12-19|1996-12-16|1997-01-15|COLLECT COD|RAIL|s wake slyly. requests cajole furiously asy|
+5922|90|1|3|35|34653.15|0.08|0.00|N|O|1996-12-12|1997-01-21|1997-01-01|DELIVER IN PERSON|SHIP|accounts. regu|
+5922|66|7|4|13|12558.78|0.08|0.07|N|O|1997-03-08|1996-12-26|1997-04-03|DELIVER IN PERSON|FOB|sly special accounts wake ironically.|
+5922|57|5|5|39|37324.95|0.04|0.07|N|O|1997-03-04|1997-01-17|1997-03-25|TAKE BACK RETURN|SHIP|e of the instructions. quick|
+5922|179|9|6|10|10791.70|0.04|0.01|N|O|1997-02-23|1996-12-26|1997-03-04|NONE|REG AIR|sly regular deposits haggle quickly ins|
+5923|177|8|1|27|29083.59|0.08|0.03|N|O|1997-08-16|1997-06-27|1997-08-29|DELIVER IN PERSON|RAIL|arefully i|
+5923|119|3|2|42|42802.62|0.01|0.08|N|O|1997-09-16|1997-07-23|1997-09-27|COLLECT COD|REG AIR|y regular theodolites w|
+5923|108|5|3|2|2016.20|0.06|0.05|N|O|1997-06-19|1997-07-31|1997-06-28|TAKE BACK RETURN|TRUCK|express patterns. even deposits|
+5923|174|4|4|46|49411.82|0.05|0.04|N|O|1997-07-29|1997-07-23|1997-08-23|COLLECT COD|SHIP|nto beans cajole blithe|
+5923|59|4|5|35|33566.75|0.04|0.05|N|O|1997-07-21|1997-07-11|1997-08-01|DELIVER IN PERSON|AIR|sts affix unusual, final requests. request|
+5924|176|5|1|38|40894.46|0.06|0.05|N|O|1995-12-17|1995-12-11|1996-01-06|TAKE BACK RETURN|AIR|ions cajole carefully along the |
+5924|53|1|2|49|46699.45|0.04|0.00|N|O|1995-10-25|1995-12-11|1995-11-08|NONE|MAIL|inly final excuses. blithely regular requ|
+5924|17|8|3|24|22008.24|0.09|0.08|N|O|1996-01-12|1995-12-13|1996-01-25|COLLECT COD|REG AIR| use carefully. special, e|
+5925|87|8|1|42|41457.36|0.05|0.02|N|O|1996-03-05|1996-01-13|1996-03-10|COLLECT COD|SHIP|to the furiously|
+5925|125|4|2|31|31778.72|0.03|0.03|N|O|1996-01-02|1995-12-14|1996-01-07|TAKE BACK RETURN|FOB|e slyly. furiously regular deposi|
+5925|89|10|3|50|49454.00|0.03|0.04|N|O|1996-02-14|1996-01-10|1996-02-15|NONE|TRUCK|es. stealthily express pains print bli|
+5925|54|9|4|30|28621.50|0.02|0.07|N|O|1996-02-21|1996-02-11|1996-03-10|NONE|TRUCK| the packa|
+5925|160|1|5|41|43466.56|0.00|0.06|N|O|1996-02-03|1995-12-24|1996-02-20|NONE|SHIP| across the pending deposits nag caref|
+5925|50|9|6|48|45602.40|0.02|0.00|N|O|1996-02-03|1996-01-19|1996-03-04|DELIVER IN PERSON|REG AIR| haggle after the fo|
+5926|90|1|1|8|7920.72|0.02|0.00|R|F|1994-07-17|1994-07-20|1994-08-11|COLLECT COD|MAIL|gle furiously express foxes. bo|
+5926|50|9|2|27|25651.35|0.09|0.05|A|F|1994-07-05|1994-08-11|1994-08-02|DELIVER IN PERSON|MAIL|ironic requests|
+5926|127|8|3|46|47247.52|0.01|0.03|R|F|1994-09-05|1994-08-12|1994-09-11|COLLECT COD|RAIL|ts integrate. courts haggl|
+5926|190|1|4|23|25074.37|0.01|0.02|A|F|1994-07-23|1994-08-10|1994-07-27|DELIVER IN PERSON|FOB|ickly special packages among |
+5927|90|1|1|44|43563.96|0.04|0.05|N|O|1997-11-29|1997-11-21|1997-12-13|DELIVER IN PERSON|TRUCK|rding to the special, final decoy|
+5927|115|2|2|8|8120.88|0.04|0.05|N|O|1997-09-24|1997-11-15|1997-10-22|TAKE BACK RETURN|SHIP|ilent dependencies nod c|
+5927|167|6|3|32|34149.12|0.10|0.07|N|O|1997-12-26|1997-10-27|1997-12-31|COLLECT COD|AIR|telets. carefully bold accounts was|
+5952|200|2|1|49|53909.80|0.10|0.02|N|O|1997-06-30|1997-07-10|1997-07-02|COLLECT COD|AIR|e furiously regular|
+5952|191|5|2|11|12003.09|0.10|0.05|N|O|1997-05-13|1997-06-04|1997-05-27|DELIVER IN PERSON|FOB|y nag blithely aga|
+5952|71|2|3|43|41756.01|0.01|0.01|N|O|1997-06-29|1997-06-06|1997-07-15|COLLECT COD|MAIL|posits sleep furiously quickly final p|
+5952|158|3|4|23|24337.45|0.00|0.07|N|O|1997-05-13|1997-06-27|1997-05-20|NONE|TRUCK|e blithely packages. eve|
+5953|129|10|1|36|37048.32|0.03|0.00|R|F|1992-05-28|1992-06-24|1992-05-29|DELIVER IN PERSON|FOB| cajole furio|
+5953|13|7|2|34|31042.34|0.03|0.04|A|F|1992-05-04|1992-06-12|1992-06-02|NONE|RAIL|hockey players use furiously against th|
+5953|162|9|3|5|5310.80|0.07|0.06|A|F|1992-04-10|1992-04-27|1992-04-14|NONE|SHIP|s. blithely |
+5953|169|8|4|23|24590.68|0.09|0.02|R|F|1992-06-05|1992-06-03|1992-06-29|TAKE BACK RETURN|FOB|he silent ideas. silent foxes po|
+5954|147|6|1|8|8377.12|0.03|0.00|A|F|1993-03-27|1993-01-22|1993-04-04|TAKE BACK RETURN|AIR|unusual th|
+5954|81|2|2|40|39243.20|0.02|0.01|A|F|1992-12-30|1993-01-16|1993-01-09|COLLECT COD|RAIL|iously ironic deposits after|
+5954|94|8|3|20|19881.80|0.09|0.07|A|F|1992-12-25|1993-02-05|1992-12-31|COLLECT COD|REG AIR| accounts wake carefu|
+5954|145|4|4|20|20902.80|0.00|0.01|R|F|1993-02-27|1993-01-04|1993-03-08|NONE|TRUCK|ke furiously blithely special packa|
+5954|100|4|5|35|35003.50|0.04|0.06|A|F|1993-03-17|1993-02-06|1993-04-10|NONE|SHIP|tions maintain slyly. furious|
+5954|193|5|6|39|42634.41|0.04|0.08|A|F|1993-02-27|1993-02-25|1993-03-29|DELIVER IN PERSON|REG AIR| always regular dolphins. furiously p|
+5955|140|1|1|14|14561.96|0.08|0.08|N|O|1995-06-22|1995-05-23|1995-06-24|DELIVER IN PERSON|TRUCK| unusual, bold theodolit|
+5955|62|7|2|15|14430.90|0.08|0.07|R|F|1995-04-22|1995-05-28|1995-04-27|NONE|FOB|y final accounts above the regu|
+5955|112|9|3|40|40484.40|0.03|0.00|R|F|1995-04-01|1995-06-11|1995-04-27|NONE|FOB|oss the fluffily regular|
+5956|155|3|1|10|10551.50|0.04|0.05|N|O|1998-07-27|1998-07-04|1998-08-21|NONE|MAIL|ic packages am|
+5956|55|7|2|23|21966.15|0.08|0.03|N|O|1998-06-06|1998-07-10|1998-06-15|DELIVER IN PERSON|RAIL|ly slyly special |
+5956|175|5|3|47|50532.99|0.04|0.06|N|O|1998-09-06|1998-06-29|1998-09-18|TAKE BACK RETURN|MAIL|lyly express theodol|
+5956|20|10|4|40|36800.80|0.09|0.05|N|O|1998-06-11|1998-07-19|1998-06-21|NONE|MAIL|final theodolites sleep carefully ironic c|
+5957|15|9|1|37|33855.37|0.07|0.00|A|F|1994-04-18|1994-02-19|1994-05-11|NONE|AIR| ideas use ruthlessly.|
+5957|59|4|2|46|44116.30|0.04|0.08|A|F|1994-01-23|1994-01-30|1994-02-07|NONE|SHIP|platelets. furiously unusual requests |
+5957|2|7|3|17|15334.00|0.01|0.01|A|F|1994-01-24|1994-02-16|1994-02-08|TAKE BACK RETURN|SHIP|. final, pending packages|
+5957|132|3|4|29|29931.77|0.01|0.03|R|F|1994-02-24|1994-03-04|1994-03-08|COLLECT COD|REG AIR|sits. final, even asymptotes cajole quickly|
+5957|88|9|5|40|39523.20|0.04|0.04|R|F|1994-01-07|1994-02-05|1994-01-26|DELIVER IN PERSON|SHIP|ironic asymptotes sleep blithely again|
+5957|6|1|6|41|37146.00|0.10|0.07|R|F|1994-03-25|1994-02-20|1994-03-31|DELIVER IN PERSON|MAIL|es across the regular requests maint|
+5957|159|1|7|32|33892.80|0.10|0.07|A|F|1994-03-05|1994-02-20|1994-03-09|NONE|TRUCK| boost carefully across the |
+5958|149|8|1|33|34621.62|0.02|0.04|N|O|1995-09-24|1995-12-12|1995-10-05|COLLECT COD|MAIL|lar, regular accounts wake furi|
+5958|43|6|2|23|21689.92|0.03|0.04|N|O|1995-09-26|1995-10-19|1995-09-27|COLLECT COD|SHIP|regular requests. bold, bold deposits unwin|
+5958|153|8|3|42|44232.30|0.10|0.00|N|O|1995-12-12|1995-10-19|1996-01-09|NONE|AIR|n accounts. final, ironic packages |
+5958|39|10|4|18|16902.54|0.04|0.05|N|O|1995-12-02|1995-10-17|1995-12-22|COLLECT COD|FOB|regular requests haggle|
+5958|132|8|5|32|33028.16|0.06|0.00|N|O|1995-09-20|1995-12-10|1995-10-14|COLLECT COD|REG AIR|e carefully special theodolites. carefully |
+5959|135|1|1|49|50721.37|0.07|0.03|R|F|1992-07-16|1992-08-09|1992-08-14|DELIVER IN PERSON|SHIP|usual packages haggle slyly pi|
+5959|147|8|2|17|17801.38|0.09|0.07|R|F|1992-06-10|1992-07-06|1992-06-23|COLLECT COD|MAIL|ackages. blithely ex|
+5959|5|6|3|4|3620.00|0.04|0.03|R|F|1992-06-14|1992-07-05|1992-07-01|NONE|MAIL|gular requests ar|
+5959|196|7|4|13|14250.47|0.03|0.00|A|F|1992-07-29|1992-07-13|1992-08-20|COLLECT COD|SHIP|ar forges. deposits det|
+5959|40|6|5|37|34781.48|0.04|0.01|R|F|1992-06-05|1992-07-18|1992-06-29|NONE|TRUCK|endencies. brai|
+5959|119|3|6|35|35668.85|0.03|0.00|A|F|1992-05-27|1992-06-19|1992-06-23|NONE|TRUCK|ely silent deposits. |
+5959|43|10|7|47|44322.88|0.02|0.01|R|F|1992-08-28|1992-07-24|1992-09-09|TAKE BACK RETURN|RAIL|deposits. slyly special cou|
+5984|70|5|1|13|12610.91|0.06|0.07|R|F|1994-10-16|1994-09-06|1994-11-11|NONE|MAIL|lar platelets. f|
+5984|102|3|2|25|25052.50|0.05|0.08|R|F|1994-10-06|1994-07-21|1994-10-28|COLLECT COD|RAIL|gular accounts. even packages nag slyly|
+5984|1|4|3|8|7208.00|0.10|0.00|R|F|1994-09-17|1994-08-28|1994-09-25|COLLECT COD|RAIL|its. express,|
+5984|190|1|4|35|38156.65|0.00|0.01|A|F|1994-08-25|1994-08-05|1994-08-31|DELIVER IN PERSON|SHIP|le fluffily regula|
+5985|86|7|1|4|3944.32|0.02|0.02|A|F|1995-05-04|1995-04-01|1995-05-17|DELIVER IN PERSON|MAIL|ole along the quickly slow d|
+5986|79|7|1|26|25455.82|0.00|0.00|R|F|1992-08-10|1992-05-23|1992-08-24|TAKE BACK RETURN|SHIP|e fluffily ironic ideas. silent |
+5986|196|8|2|25|27404.75|0.03|0.06|A|F|1992-06-16|1992-07-17|1992-06-29|TAKE BACK RETURN|MAIL| instructions. slyly regular de|
+5986|30|5|3|1|930.03|0.07|0.06|A|F|1992-05-21|1992-06-21|1992-05-24|DELIVER IN PERSON|REG AIR|fix quickly quickly final deposits. fluffil|
+5986|90|1|4|31|30692.79|0.00|0.03|A|F|1992-08-21|1992-06-29|1992-09-14|NONE|AIR|structions! furiously pending instructi|
+5986|136|7|5|6|6216.78|0.05|0.02|A|F|1992-07-16|1992-06-10|1992-07-29|DELIVER IN PERSON|RAIL|al foxes within the slyly speci|
+5987|23|2|1|1|923.02|0.01|0.04|N|O|1996-09-13|1996-10-29|1996-09-21|DELIVER IN PERSON|REG AIR|refully final excuses haggle furiously ag|
+5987|176|5|2|20|21523.40|0.10|0.06|N|O|1996-11-28|1996-09-17|1996-12-05|TAKE BACK RETURN|RAIL|ing excuses nag quickly always bold|
+5987|92|3|3|43|42659.87|0.08|0.04|N|O|1996-10-30|1996-10-13|1996-11-12|NONE|AIR|theodolites wake above the furiously b|
+5987|97|1|4|37|36892.33|0.08|0.08|N|O|1996-10-15|1996-10-27|1996-11-09|NONE|MAIL|le furiously carefully special |
+5988|172|1|1|41|43958.97|0.08|0.03|R|F|1994-01-20|1994-02-06|1994-02-10|COLLECT COD|AIR|the pending, express reque|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/nation.tbl b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/nation.tbl
new file mode 100644
index 0000000..ed3fd5b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/nation.tbl
@@ -0,0 +1,25 @@
+0|ALGERIA|0| haggle. carefully final deposits detect slyly agai|
+1|ARGENTINA|1|al foxes promise slyly according to the regular accounts. bold requests alon|
+2|BRAZIL|1|y alongside of the pending deposits. carefully special packages are about the ironic forges. slyly special |
+3|CANADA|1|eas hang ironic, silent packages. slyly regular packages are furiously over the tithes. fluffily bold|
+4|EGYPT|4|y above the carefully unusual theodolites. final dugouts are quickly across the furiously regular d|
+5|ETHIOPIA|0|ven packages wake quickly. regu|
+6|FRANCE|3|refully final requests. regular, ironi|
+7|GERMANY|3|l platelets. regular accounts x-ray: unusual, regular acco|
+8|INDIA|2|ss excuses cajole slyly across the packages. deposits print aroun|
+9|INDONESIA|2| slyly express asymptotes. regular deposits haggle slyly. carefully ironic hockey players sleep blithely. carefull|
+10|IRAN|4|efully alongside of the slyly final dependencies. |
+11|IRAQ|4|nic deposits boost atop the quickly final requests? quickly regula|
+12|JAPAN|2|ously. final, express gifts cajole a|
+13|JORDAN|4|ic deposits are blithely about the carefully regular pa|
+14|KENYA|0| pending excuses haggle furiously deposits. pending, express pinto beans wake fluffily past t|
+15|MOROCCO|0|rns. blithely bold courts among the closely regular packages use furiously bold platelets?|
+16|MOZAMBIQUE|0|s. ironic, unusual asymptotes wake blithely r|
+17|PERU|1|platelets. blithely pending dependencies use fluffily across the even pinto beans. carefully silent accoun|
+18|CHINA|2|c dependencies. furiously express notornis sleep slyly regular accounts. ideas sleep. depos|
+19|ROMANIA|3|ular asymptotes are about the furious multipliers. express dependencies nag above the ironically ironic account|
+20|SAUDI ARABIA|4|ts. silent requests haggle. closely express packages sleep across the blithely|
+21|VIETNAM|2|hely enticingly express accounts. even, final |
+22|RUSSIA|3| requests against the platelets use never according to the quickly regular pint|
+23|UNITED KINGDOM|3|eans boost carefully special requests. accounts are. carefull|
+24|UNITED STATES|1|y final packages. slow foxes cajole quickly. quickly silent platelets breach ironic accounts. unusual pinto be|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/orders.tbl b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/orders.tbl
new file mode 100644
index 0000000..1ebd663
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/orders.tbl
@@ -0,0 +1,1500 @@
+1|37|O|131251.81|1996-01-02|5-LOW|Clerk#000000951|0|nstructions sleep furiously among |
+2|79|O|40183.29|1996-12-01|1-URGENT|Clerk#000000880|0| foxes. pending accounts at the pending, silent asymptot|
+3|124|F|160882.76|1993-10-14|5-LOW|Clerk#000000955|0|sly final accounts boost. carefully regular ideas cajole carefully. depos|
+4|137|O|31084.79|1995-10-11|5-LOW|Clerk#000000124|0|sits. slyly regular warthogs cajole. regular, regular theodolites acro|
+5|46|F|86615.25|1994-07-30|5-LOW|Clerk#000000925|0|quickly. bold deposits sleep slyly. packages use slyly|
+6|56|F|36468.55|1992-02-21|4-NOT SPECIFIED|Clerk#000000058|0|ggle. special, final requests are against the furiously specia|
+7|40|O|171488.73|1996-01-10|2-HIGH|Clerk#000000470|0|ly special requests |
+32|131|O|116923.00|1995-07-16|2-HIGH|Clerk#000000616|0|ise blithely bold, regular requests. quickly unusual dep|
+33|67|F|99798.76|1993-10-27|3-MEDIUM|Clerk#000000409|0|uriously. furiously final request|
+34|62|O|41670.02|1998-07-21|3-MEDIUM|Clerk#000000223|0|ly final packages. fluffily final deposits wake blithely ideas. spe|
+35|128|O|148789.52|1995-10-23|4-NOT SPECIFIED|Clerk#000000259|0|zzle. carefully enticing deposits nag furio|
+36|116|O|38988.98|1995-11-03|1-URGENT|Clerk#000000358|0| quick packages are blithely. slyly silent accounts wake qu|
+37|88|F|113701.89|1992-06-03|3-MEDIUM|Clerk#000000456|0|kly regular pinto beans. carefully unusual waters cajole never|
+38|125|O|46366.56|1996-08-21|4-NOT SPECIFIED|Clerk#000000604|0|haggle blithely. furiously express ideas haggle blithely furiously regular re|
+39|82|O|219707.84|1996-09-20|3-MEDIUM|Clerk#000000659|0|ole express, ironic requests: ir|
+64|34|F|20065.73|1994-07-16|3-MEDIUM|Clerk#000000661|0|wake fluffily. sometimes ironic pinto beans about the dolphin|
+65|17|P|65883.92|1995-03-18|1-URGENT|Clerk#000000632|0|ular requests are blithely pending orbits-- even requests against the deposit|
+66|130|F|79258.24|1994-01-20|5-LOW|Clerk#000000743|0|y pending requests integrate|
+67|58|O|116227.05|1996-12-19|4-NOT SPECIFIED|Clerk#000000547|0|symptotes haggle slyly around the furiously iron|
+68|29|O|215135.72|1998-04-18|3-MEDIUM|Clerk#000000440|0| pinto beans sleep carefully. blithely ironic deposits haggle furiously acro|
+69|85|F|162176.23|1994-06-04|4-NOT SPECIFIED|Clerk#000000330|0| depths atop the slyly thin deposits detect among the furiously silent accou|
+70|65|F|84651.80|1993-12-18|5-LOW|Clerk#000000322|0| carefully ironic request|
+71|4|O|178821.73|1998-01-24|4-NOT SPECIFIED|Clerk#000000271|0| express deposits along the blithely regul|
+96|109|F|55090.67|1994-04-17|2-HIGH|Clerk#000000395|0|oost furiously. pinto|
+97|22|F|68908.31|1993-01-29|3-MEDIUM|Clerk#000000547|0|hang blithely along the regular accounts. furiously even ideas after the|
+98|106|F|51004.44|1994-09-25|1-URGENT|Clerk#000000448|0|c asymptotes. quickly regular packages should have to nag re|
+99|89|F|92326.79|1994-03-13|4-NOT SPECIFIED|Clerk#000000973|0|e carefully ironic packages. pending|
+100|148|O|141311.01|1998-02-28|4-NOT SPECIFIED|Clerk#000000577|0|heodolites detect slyly alongside of the ent|
+101|28|O|95591.40|1996-03-17|3-MEDIUM|Clerk#000000419|0|ding accounts above the slyly final asymptote|
+102|1|O|113954.89|1997-05-09|2-HIGH|Clerk#000000596|0| slyly according to the asymptotes. carefully final packages integrate furious|
+103|31|O|95563.95|1996-06-20|4-NOT SPECIFIED|Clerk#000000090|0|ges. carefully unusual instructions haggle quickly regular f|
+128|74|F|36333.34|1992-06-15|1-URGENT|Clerk#000000385|0|ns integrate fluffily. ironic asymptotes after the regular excuses nag around |
+129|73|F|188124.55|1992-11-19|5-LOW|Clerk#000000859|0|ing tithes. carefully pending deposits boost about the silently express |
+130|37|F|115717.37|1992-05-08|2-HIGH|Clerk#000000036|0|le slyly unusual, regular packages? express deposits det|
+131|94|F|96596.81|1994-06-08|3-MEDIUM|Clerk#000000625|0|after the fluffily special foxes integrate s|
+132|28|F|118802.62|1993-06-11|3-MEDIUM|Clerk#000000488|0|sits are daringly accounts. carefully regular foxes sleep slyly about the|
+133|44|O|80437.72|1997-11-29|1-URGENT|Clerk#000000738|0|usly final asymptotes |
+134|7|F|154260.84|1992-05-01|4-NOT SPECIFIED|Clerk#000000711|0|lar theodolites boos|
+135|61|O|174569.88|1995-10-21|4-NOT SPECIFIED|Clerk#000000804|0|l platelets use according t|
+160|83|O|86076.86|1996-12-19|4-NOT SPECIFIED|Clerk#000000342|0|thely special sauternes wake slyly of t|
+161|17|F|19056.99|1994-08-31|2-HIGH|Clerk#000000322|0|carefully! special instructions sin|
+162|16|O|2158.13|1995-05-08|3-MEDIUM|Clerk#000000378|0|nts hinder fluffily ironic instructions. express, express excuses |
+163|88|O|125170.86|1997-09-05|3-MEDIUM|Clerk#000000379|0|y final packages. final foxes since the quickly even|
+164|1|F|202660.52|1992-10-21|5-LOW|Clerk#000000209|0|cajole ironic courts. slyly final ideas are slyly. blithely final Tiresias sub|
+165|28|F|141824.23|1993-01-30|4-NOT SPECIFIED|Clerk#000000292|0|across the blithely regular accounts. bold|
+166|109|O|93335.60|1995-09-12|2-HIGH|Clerk#000000440|0|lets. ironic, bold asymptotes kindle|
+167|121|F|52982.23|1993-01-04|4-NOT SPECIFIED|Clerk#000000731|0|s nag furiously bold excuses. fluffily iron|
+192|83|O|133002.55|1997-11-25|5-LOW|Clerk#000000483|0|y unusual platelets among the final instructions integrate rut|
+193|80|F|48053.18|1993-08-08|1-URGENT|Clerk#000000025|0|the furiously final pin|
+194|62|F|114097.63|1992-04-05|3-MEDIUM|Clerk#000000352|0|egular requests haggle slyly regular, regular pinto beans. asymptote|
+195|136|F|120053.52|1993-12-28|3-MEDIUM|Clerk#000000216|0|old forges are furiously sheaves. slyly fi|
+196|65|F|33248.04|1993-03-17|2-HIGH|Clerk#000000988|0|beans boost at the foxes. silent foxes|
+197|34|P|100290.07|1995-04-07|2-HIGH|Clerk#000000969|0|solve quickly about the even braids. carefully express deposits affix care|
+198|112|O|125792.83|1998-01-02|4-NOT SPECIFIED|Clerk#000000331|0|its. carefully ironic requests sleep. furiously express fox|
+199|53|O|80592.44|1996-03-07|2-HIGH|Clerk#000000489|0|g theodolites. special packag|
+224|4|F|155680.60|1994-06-18|4-NOT SPECIFIED|Clerk#000000642|0|r the quickly thin courts. carefully|
+225|34|P|165890.47|1995-05-25|1-URGENT|Clerk#000000177|0|s. blithely ironic accounts wake quickly fluffily special acc|
+226|128|F|180119.22|1993-03-10|2-HIGH|Clerk#000000756|0|s are carefully at the blithely ironic acc|
+227|10|O|46076.46|1995-11-10|5-LOW|Clerk#000000919|0| express instructions. slyly regul|
+228|46|F|2638.98|1993-02-25|1-URGENT|Clerk#000000562|0|es was slyly among the regular foxes. blithely regular dependenci|
+229|112|F|142290.77|1993-12-29|1-URGENT|Clerk#000000628|0|he fluffily even instructions. furiously i|
+230|103|F|107231.60|1993-10-27|1-URGENT|Clerk#000000520|0|odolites. carefully quick requ|
+231|91|F|141554.06|1994-09-29|2-HIGH|Clerk#000000446|0| packages haggle slyly after the carefully ironic instruct|
+256|125|F|106315.25|1993-10-19|4-NOT SPECIFIED|Clerk#000000834|0|he fluffily final ideas might are final accounts. carefully f|
+257|124|O|7102.74|1998-03-28|3-MEDIUM|Clerk#000000680|0|ts against the sly warhorses cajole slyly accounts|
+258|43|F|186669.10|1993-12-29|1-URGENT|Clerk#000000167|0|dencies. blithely quick packages cajole. ruthlessly final accounts|
+259|44|F|75661.70|1993-09-29|4-NOT SPECIFIED|Clerk#000000601|0|ages doubt blithely against the final foxes. carefully express deposits dazzle|
+260|106|O|179292.14|1996-12-10|3-MEDIUM|Clerk#000000960|0|lently regular pinto beans sleep after the slyly e|
+261|47|F|201003.12|1993-06-29|3-MEDIUM|Clerk#000000310|0|ully fluffily brave instructions. furiousl|
+262|31|O|108443.84|1995-11-25|4-NOT SPECIFIED|Clerk#000000551|0|l packages. blithely final pinto beans use carefu|
+263|118|F|79782.56|1994-05-17|2-HIGH|Clerk#000000088|0| pending instructions. blithely un|
+288|8|O|163794.53|1997-02-21|1-URGENT|Clerk#000000109|0|uriously final requests. even, final ideas det|
+289|104|O|131092.67|1997-02-10|3-MEDIUM|Clerk#000000103|0|sily. slyly special excuse|
+290|118|F|62814.89|1994-01-01|4-NOT SPECIFIED|Clerk#000000735|0|efully dogged deposits. furiou|
+291|142|F|66817.05|1994-03-13|1-URGENT|Clerk#000000923|0|dolites. carefully regular pinto beans cajol|
+292|23|F|30783.05|1992-01-13|2-HIGH|Clerk#000000193|0|g pinto beans will have to sleep f|
+293|31|F|37248.78|1992-10-02|2-HIGH|Clerk#000000629|0|re bold, ironic deposits. platelets c|
+294|52|F|30059.47|1993-07-16|3-MEDIUM|Clerk#000000499|0|kly according to the frays. final dolphins affix quickly |
+295|19|F|89345.99|1994-09-29|2-HIGH|Clerk#000000155|0| unusual pinto beans play. regular ideas haggle|
+320|1|O|39835.54|1997-11-21|2-HIGH|Clerk#000000573|0|ar foxes nag blithely|
+321|124|F|62251.15|1993-03-21|3-MEDIUM|Clerk#000000289|0|equests run. blithely final dependencies after the deposits wake caref|
+322|134|F|127068.89|1992-03-19|1-URGENT|Clerk#000000158|0|fully across the slyly bold packages. packages against the quickly regular i|
+323|40|F|79683.42|1994-03-26|1-URGENT|Clerk#000000959|0|arefully pending foxes sleep blithely. slyly express accoun|
+324|106|F|26868.85|1992-03-20|1-URGENT|Clerk#000000352|0| about the ironic, regular deposits run blithely against the excuses|
+325|41|F|71543.41|1993-10-17|5-LOW|Clerk#000000844|0|ly sometimes pending pa|
+326|76|O|229165.17|1995-06-04|2-HIGH|Clerk#000000466|0| requests. furiously ironic asymptotes mold carefully alongside of the blit|
+327|145|P|24468.16|1995-04-17|5-LOW|Clerk#000000992|0|ng the slyly final courts. slyly even escapades eat |
+352|107|F|16003.86|1994-03-08|2-HIGH|Clerk#000000932|0|ke slyly bold pinto beans. blithely regular accounts against the spe|
+353|2|F|179984.42|1993-12-31|5-LOW|Clerk#000000449|0| quiet ideas sleep. even instructions cajole slyly. silently spe|
+354|139|O|157062.70|1996-03-14|2-HIGH|Clerk#000000511|0|ly regular ideas wake across the slyly silent ideas. final deposits eat b|
+355|71|F|69447.25|1994-06-14|5-LOW|Clerk#000000532|0|s. sometimes regular requests cajole. regular, pending accounts a|
+356|148|F|162786.67|1994-06-30|4-NOT SPECIFIED|Clerk#000000944|0|as wake along the bold accounts. even, |
+357|61|O|98723.11|1996-10-09|2-HIGH|Clerk#000000301|0|e blithely about the express, final accounts. quickl|
+358|4|F|226806.66|1993-09-20|2-HIGH|Clerk#000000392|0|l, silent instructions are slyly. silently even de|
+359|79|F|142891.22|1994-12-19|3-MEDIUM|Clerk#000000934|0|n dolphins. special courts above the carefully ironic requests use|
+384|115|F|122785.82|1992-03-03|5-LOW|Clerk#000000206|0|, even accounts use furiously packages. slyly ironic pla|
+385|34|O|50724.06|1996-03-22|5-LOW|Clerk#000000600|0|hless accounts unwind bold pain|
+386|61|F|90380.40|1995-01-25|2-HIGH|Clerk#000000648|0| haggle quickly. stealthily bold asymptotes haggle among the furiously even re|
+387|4|O|130647.18|1997-01-26|4-NOT SPECIFIED|Clerk#000000768|0| are carefully among the quickly even deposits. furiously silent req|
+388|46|F|120533.46|1992-12-16|4-NOT SPECIFIED|Clerk#000000356|0|ar foxes above the furiously ironic deposits nag slyly final reque|
+389|127|F|1984.14|1994-02-17|2-HIGH|Clerk#000000062|0|ing to the regular asymptotes. final, pending foxes about the blithely sil|
+390|103|O|168562.27|1998-04-07|5-LOW|Clerk#000000404|0|xpress asymptotes use among the regular, final pinto b|
+391|112|F|13282.23|1994-11-17|2-HIGH|Clerk#000000256|0|orges thrash fluffil|
+416|41|F|71362.50|1993-09-27|5-LOW|Clerk#000000294|0| the accounts. fluffily bold depo|
+417|55|F|91982.29|1994-02-06|3-MEDIUM|Clerk#000000468|0|ironic, even packages. thinly unusual accounts sleep along the slyly unusual |
+418|95|P|33124.96|1995-04-13|4-NOT SPECIFIED|Clerk#000000643|0|. furiously ironic instruc|
+419|118|O|111597.96|1996-10-01|3-MEDIUM|Clerk#000000376|0|osits. blithely pending theodolites boost carefully|
+420|91|O|198039.23|1995-10-31|4-NOT SPECIFIED|Clerk#000000756|0|leep carefully final excuses. fluffily pending requests unwind carefully above|
+421|40|F|1084.38|1992-02-22|5-LOW|Clerk#000000405|0|egular, even packages according to the final, un|
+422|74|O|106045.89|1997-05-31|4-NOT SPECIFIED|Clerk#000000049|0|aggle carefully across the accounts. regular accounts eat fluffi|
+423|104|O|26981.31|1996-06-01|1-URGENT|Clerk#000000674|0|quests. deposits cajole quickly. furiously bold accounts haggle q|
+448|149|O|114978.03|1995-08-21|3-MEDIUM|Clerk#000000597|0| regular, express foxes use blithely. quic|
+449|97|O|41605.63|1995-07-20|2-HIGH|Clerk#000000841|0|. furiously regular theodolites affix blithely |
+450|49|P|153386.61|1995-03-05|4-NOT SPECIFIED|Clerk#000000293|0|d theodolites. boldly bold foxes since the pack|
+451|100|O|104664.40|1998-05-25|5-LOW|Clerk#000000048|0|nic pinto beans. theodolites poach carefully; |
+452|61|O|2007.48|1997-10-14|1-URGENT|Clerk#000000498|0|t, unusual instructions above the blithely bold pint|
+453|46|O|216826.73|1997-05-26|5-LOW|Clerk#000000504|0|ss foxes. furiously regular ideas sleep according to t|
+454|49|O|23198.24|1995-12-27|5-LOW|Clerk#000000890|0|dolites sleep carefully blithely regular deposits. quickly regul|
+455|13|O|138010.76|1996-12-04|1-URGENT|Clerk#000000796|0| about the final platelets. dependen|
+480|73|F|20530.97|1993-05-08|5-LOW|Clerk#000000004|0|ealthy pinto beans. fluffily regular requests along the special sheaves wake |
+481|31|F|117827.18|1992-10-08|2-HIGH|Clerk#000000230|0|ly final ideas. packages haggle fluffily|
+482|127|O|136634.34|1996-03-26|1-URGENT|Clerk#000000295|0|ts. deposits wake: final acco|
+483|35|O|39793.05|1995-07-11|2-HIGH|Clerk#000000025|0|cross the carefully final e|
+484|55|O|219920.62|1997-01-03|3-MEDIUM|Clerk#000000545|0|grouches use. furiously bold accounts maintain. bold, regular deposits|
+485|101|O|110432.76|1997-03-26|2-HIGH|Clerk#000000105|0| regular ideas nag thinly furiously s|
+486|52|O|185968.15|1996-03-11|4-NOT SPECIFIED|Clerk#000000803|0|riously dolphins. fluffily ironic requ|
+487|109|F|48502.79|1992-08-18|1-URGENT|Clerk#000000086|0|ithely unusual courts eat accordi|
+512|64|P|124661.48|1995-05-20|5-LOW|Clerk#000000814|0|ding requests. carefully express theodolites was quickly. furious|
+513|61|O|63703.92|1995-05-01|2-HIGH|Clerk#000000522|0|regular packages. pinto beans cajole carefully against the even|
+514|76|O|104585.77|1996-04-04|2-HIGH|Clerk#000000094|0| cajole furiously. slyly final excuses cajole. slyly special instructions |
+515|142|F|153720.22|1993-08-29|4-NOT SPECIFIED|Clerk#000000700|0|eposits are furiously furiously silent pinto beans. pending pack|
+516|44|O|10677.86|1998-04-21|2-HIGH|Clerk#000000305|0|lar, unusual platelets are carefully. even courts sleep bold, final pinto bea|
+517|10|O|82197.79|1997-04-07|5-LOW|Clerk#000000359|0|slyly pending deposits cajole quickly packages. furiou|
+518|145|O|223537.09|1998-02-08|2-HIGH|Clerk#000000768|0| the carefully bold accounts. quickly regular excuses are|
+519|64|O|95731.50|1997-10-31|1-URGENT|Clerk#000000985|0|ains doze furiously against the f|
+544|94|F|47627.89|1993-02-17|2-HIGH|Clerk#000000145|0|the special, final accounts. dogged dolphins|
+545|64|O|23476.12|1995-11-07|2-HIGH|Clerk#000000537|0|as. blithely final hockey players about th|
+546|145|O|14790.37|1996-11-01|2-HIGH|Clerk#000000041|0|osits sleep. slyly special dolphins about the q|
+547|100|O|96855.29|1996-06-22|3-MEDIUM|Clerk#000000976|0|ing accounts eat. carefully regular packa|
+548|124|F|99088.75|1994-09-21|1-URGENT|Clerk#000000435|0|arefully express instru|
+549|110|F|141679.41|1992-07-13|1-URGENT|Clerk#000000196|0|ideas alongside of |
+550|25|O|33123.28|1995-08-02|1-URGENT|Clerk#000000204|0|t requests. blithely |
+551|91|O|46355.83|1995-05-30|1-URGENT|Clerk#000000179|0|xpress accounts boost quic|
+576|31|O|18307.45|1997-05-13|3-MEDIUM|Clerk#000000955|0|l requests affix regular requests. final account|
+577|56|F|34768.68|1994-12-19|5-LOW|Clerk#000000154|0| deposits engage stealthil|
+578|94|O|70392.02|1997-01-10|5-LOW|Clerk#000000281|0|e blithely even packages. slyly pending platelets bes|
+579|68|O|120828.12|1998-03-11|2-HIGH|Clerk#000000862|0| regular instructions. blithely even p|
+580|61|O|88219.12|1997-07-05|2-HIGH|Clerk#000000314|0|tegrate fluffily regular accou|
+581|70|O|126066.00|1997-02-23|4-NOT SPECIFIED|Clerk#000000239|0| requests. even requests use slyly. blithely ironic |
+582|50|O|129004.81|1997-10-21|1-URGENT|Clerk#000000378|0|n pinto beans print a|
+583|49|O|127817.38|1997-03-19|3-MEDIUM|Clerk#000000792|0|efully express requests. a|
+608|26|O|62567.99|1996-02-28|3-MEDIUM|Clerk#000000995|0|nic waters wake slyly slyly expre|
+609|127|F|21088.59|1994-06-01|3-MEDIUM|Clerk#000000348|0|- ironic gifts believe furiously ca|
+610|52|O|175142.28|1995-08-02|1-URGENT|Clerk#000000610|0|totes. ironic, unusual packag|
+611|106|F|73907.63|1993-01-27|1-URGENT|Clerk#000000401|0|ounts detect furiously ac|
+612|82|F|145695.42|1992-10-21|3-MEDIUM|Clerk#000000759|0|boost quickly quickly final excuses. final foxes use bravely afte|
+613|139|O|33396.35|1995-06-18|2-HIGH|Clerk#000000172|0|ts hinder among the deposits. fluffily ironic depos|
+614|134|F|218116.21|1992-12-01|2-HIGH|Clerk#000000388|0| deposits! even, daring theodol|
+615|67|F|32890.89|1992-05-09|5-LOW|Clerk#000000388|0|t to promise asymptotes. packages haggle alongside of the fluffil|
+640|97|F|145495.62|1993-01-23|2-HIGH|Clerk#000000433|0|r, unusual accounts boost carefully final ideas. slyly silent theod|
+641|133|F|120626.49|1993-08-30|5-LOW|Clerk#000000175|0|ents cajole furiously about the quickly silent pac|
+642|40|F|22994.51|1993-12-16|3-MEDIUM|Clerk#000000357|0| among the requests wake slyly alongside of th|
+643|58|P|180396.95|1995-03-25|2-HIGH|Clerk#000000354|0|g dependencies. regular accounts |
+644|8|F|201268.06|1992-05-01|1-URGENT|Clerk#000000550|0| blithely unusual platelets haggle ironic, special excuses. excuses unwi|
+645|115|F|234763.73|1994-12-03|2-HIGH|Clerk#000000090|0|quickly daring theodolites across the regu|
+646|52|F|142070.65|1994-11-22|2-HIGH|Clerk#000000203|0|carefully even foxes. fina|
+647|143|O|56449.23|1997-08-07|1-URGENT|Clerk#000000270|0|egular pearls. carefully express asymptotes are. even account|
+672|109|F|89877.09|1994-04-14|5-LOW|Clerk#000000106|0|egular requests are furiously according to |
+673|80|F|21137.08|1994-03-10|1-URGENT|Clerk#000000448|0| special pinto beans use quickly furiously even depende|
+674|34|F|27204.60|1992-08-29|5-LOW|Clerk#000000448|0|ully special deposits. furiously final warhorses affix carefully. fluffily f|
+675|13|O|125188.72|1997-07-31|2-HIGH|Clerk#000000168|0|ffily between the careful|
+676|38|O|163966.67|1996-12-13|2-HIGH|Clerk#000000248|0|the final deposits. special, pending|
+677|124|F|147915.68|1993-11-24|3-MEDIUM|Clerk#000000824|0|uriously special pinto beans cajole carefully. fi|
+678|131|F|135761.05|1993-02-27|5-LOW|Clerk#000000530|0|. blithely final somas about the|
+679|49|O|8945.03|1995-12-15|2-HIGH|Clerk#000000853|0|tealthy, final pinto beans haggle slyly. pending platelets about the special, |
+704|85|O|56210.26|1996-11-21|3-MEDIUM|Clerk#000000682|0|blithely pending platelets wake alongside of the final, iron|
+705|43|O|83773.49|1997-02-13|4-NOT SPECIFIED|Clerk#000000294|0|ithely regular dependencies. express, even packages sleep slyly pending t|
+706|148|O|23973.60|1995-09-09|1-URGENT|Clerk#000000448|0|g the packages. deposits caj|
+707|118|F|58218.35|1994-11-20|3-MEDIUM|Clerk#000000199|0| ideas about the silent, bold deposits nag dolphins|
+708|32|O|100445.59|1998-07-03|3-MEDIUM|Clerk#000000101|0|lphins cajole about t|
+709|37|O|72055.87|1998-04-21|1-URGENT|Clerk#000000461|0|ons alongside of the carefully bold pinto bea|
+710|133|F|208974.42|1993-01-02|5-LOW|Clerk#000000026|0| regular, regular requests boost. fluffily re|
+711|64|F|92484.70|1993-09-23|4-NOT SPECIFIED|Clerk#000000856|0|its. fluffily regular gifts are furi|
+736|47|O|130204.17|1998-06-21|5-LOW|Clerk#000000881|0|refully of the final pi|
+737|121|F|12984.85|1992-04-26|5-LOW|Clerk#000000233|0|ake blithely express, ironic theodolites. blithely special accounts wa|
+738|22|F|114145.18|1993-03-02|4-NOT SPECIFIED|Clerk#000000669|0|ly even foxes. furiously regular accounts cajole ca|
+739|1|O|159171.69|1998-05-31|5-LOW|Clerk#000000900|0| against the slyly ironic packages nag slyly ironic|
+740|44|O|83490.99|1995-07-16|3-MEDIUM|Clerk#000000583|0|courts haggle furiously across the final, regul|
+741|106|O|47985.98|1998-07-07|2-HIGH|Clerk#000000295|0|ic instructions. slyly express instructions solv|
+742|103|F|207632.55|1994-12-23|5-LOW|Clerk#000000543|0|equests? slyly ironic dolphins boost carefully above the blithely|
+743|79|O|23614.89|1996-10-04|4-NOT SPECIFIED|Clerk#000000933|0|eans. furiously ironic deposits sleep carefully carefully qui|
+768|98|O|220636.82|1996-08-20|3-MEDIUM|Clerk#000000411|0|jole slyly ironic packages. slyly even idea|
+769|80|F|43092.76|1993-06-02|3-MEDIUM|Clerk#000000172|0|ggle furiously. ironic packages haggle slyly. bold platelets affix s|
+770|32|O|64271.75|1998-05-23|5-LOW|Clerk#000000572|0|heodolites. furiously special pinto beans cajole pac|
+771|46|O|105302.05|1995-06-17|1-URGENT|Clerk#000000105|0|s. furiously final instructions across the deposit|
+772|97|F|128234.96|1993-04-17|2-HIGH|Clerk#000000430|0|s boost blithely fluffily idle ideas? fluffily even pin|
+773|133|F|146862.27|1993-09-26|3-MEDIUM|Clerk#000000307|0|tions are quickly accounts. accounts use bold, even pinto beans. gifts ag|
+774|80|O|145857.60|1995-12-04|1-URGENT|Clerk#000000883|0|tealthily even depths|
+775|134|F|59455.61|1995-03-18|1-URGENT|Clerk#000000191|0|kly express requests. fluffily silent accounts poach furiously|
+800|56|O|87892.38|1998-07-14|2-HIGH|Clerk#000000213|0|y alongside of the pending packages? final platelets nag fluffily carefu|
+801|118|F|127717.72|1992-02-18|1-URGENT|Clerk#000000186|0|iously from the furiously enticing reques|
+802|137|F|156381.95|1995-01-05|1-URGENT|Clerk#000000516|0|posits. ironic, pending requests cajole. even theodol|
+803|16|O|27629.66|1997-04-29|5-LOW|Clerk#000000260|0|ic instructions. even deposits haggle furiously at the deposits-- regular de|
+804|50|F|94400.43|1993-03-12|3-MEDIUM|Clerk#000000931|0|s. blithely final foxes are about the packag|
+805|127|O|90042.41|1995-07-05|4-NOT SPECIFIED|Clerk#000000856|0|y according to the fluffily |
+806|131|O|26839.16|1996-06-20|2-HIGH|Clerk#000000240|0| the ironic packages wake carefully fina|
+807|145|F|222392.53|1993-11-24|3-MEDIUM|Clerk#000000012|0|refully special tithes. blithely regular accoun|
+832|29|F|68494.08|1992-04-19|5-LOW|Clerk#000000495|0|xes. bravely regular packages sleep up the furiously bold accou|
+833|56|F|49033.69|1994-02-13|3-MEDIUM|Clerk#000000437|0|ts haggle quickly across the slyl|
+834|43|F|46459.92|1994-05-23|3-MEDIUM|Clerk#000000805|0| sleep. quickly even foxes are boldly. slyly express requests use slyly|
+835|65|O|62430.67|1995-10-08|4-NOT SPECIFIED|Clerk#000000416|0|s about the carefully special foxes haggle quickly about the|
+836|70|O|72843.48|1996-11-25|4-NOT SPECIFIED|Clerk#000000729|0|ely bold excuses sleep regular ideas. furiously unusual ideas wake furiou|
+837|116|F|60918.41|1994-06-15|4-NOT SPECIFIED|Clerk#000000563|0|kages sleep slyly above the ironic, final orbits|
+838|17|O|82918.36|1998-01-29|5-LOW|Clerk#000000213|0| slyly around the slyly even|
+839|28|O|70182.63|1995-08-08|1-URGENT|Clerk#000000951|0|the carefully even platelets. furiously unusual fo|
+864|139|O|74710.74|1997-08-17|1-URGENT|Clerk#000000036|0|ly after the slyly regular deposits. express, regular asymptotes nag ca|
+865|4|F|70430.54|1993-05-04|3-MEDIUM|Clerk#000000337|0|. special packages wake after the carefully final accounts. express pinto be|
+866|40|F|4766.19|1992-11-28|3-MEDIUM|Clerk#000000718|0|ins after the even, even accounts nod blithel|
+867|26|F|7471.75|1993-11-16|3-MEDIUM|Clerk#000000877|0|pades nag quickly final, |
+868|104|F|127345.45|1992-06-09|4-NOT SPECIFIED|Clerk#000000782|0|onic theodolites print carefully. blithely dogge|
+869|136|O|58932.19|1997-01-12|2-HIGH|Clerk#000000245|0|ar sheaves are slowly. slyly even attainments boost theodolites. furiously|
+870|34|F|40492.37|1993-06-20|4-NOT SPECIFIED|Clerk#000000123|0|blithely ironic ideas nod. sly, r|
+871|16|O|172861.58|1995-11-15|5-LOW|Clerk#000000882|0|oss the ironic theodolites.|
+896|2|F|169847.63|1993-03-09|1-URGENT|Clerk#000000187|0|inal packages eat blithely according to the warhorses. furiously quiet de|
+897|49|P|57697.44|1995-03-20|1-URGENT|Clerk#000000316|0| wake quickly against |
+898|55|F|101020.75|1993-06-03|2-HIGH|Clerk#000000611|0|. unusual pinto beans haggle quickly across |
+899|109|O|125562.09|1998-04-08|5-LOW|Clerk#000000575|0|rts engage carefully final theodolites.|
+900|46|F|120073.51|1994-10-01|4-NOT SPECIFIED|Clerk#000000060|0| fluffily express deposits nag furiousl|
+901|13|O|81826.12|1998-07-21|4-NOT SPECIFIED|Clerk#000000929|0|lyly even foxes are furious, silent requests. requests about the quickly |
+902|10|F|37348.62|1994-07-27|4-NOT SPECIFIED|Clerk#000000811|0|yly final requests over the furiously regula|
+903|11|O|109351.87|1995-07-07|4-NOT SPECIFIED|Clerk#000000793|0|e slyly about the final pl|
+928|67|F|228136.49|1995-03-02|5-LOW|Clerk#000000450|0|ithely express pinto beans. |
+929|83|F|109301.02|1992-10-02|2-HIGH|Clerk#000000160|0|its. furiously even foxes affix carefully finally silent accounts. express req|
+930|131|F|199102.23|1994-12-17|1-URGENT|Clerk#000000004|0| accounts nag slyly. ironic, ironic accounts wake blithel|
+931|103|F|117909.23|1992-12-07|1-URGENT|Clerk#000000881|0|ss packages haggle furiously express, regular deposits. even, e|
+932|41|O|40234.50|1997-05-16|2-HIGH|Clerk#000000218|0|ly express instructions boost furiously reg|
+933|97|F|71349.30|1992-08-05|4-NOT SPECIFIED|Clerk#000000752|0|ial courts wake permanently against the furiously regular ideas. unusual |
+934|52|O|17213.59|1996-07-03|1-URGENT|Clerk#000000229|0|ts integrate carefully. sly, regular deposits af|
+935|50|O|97733.87|1997-09-24|5-LOW|Clerk#000000180|0|iously final deposits cajole. blithely even packages |
+960|35|F|63537.13|1994-09-21|3-MEDIUM|Clerk#000000120|0|regular accounts. requests|
+961|56|P|158893.16|1995-06-04|4-NOT SPECIFIED|Clerk#000000720|0|ons nag furiously among the quickl|
+962|37|F|98258.73|1994-05-06|5-LOW|Clerk#000000463|0|ments nag deposits. fluffily ironic a|
+963|26|F|53287.25|1994-05-26|3-MEDIUM|Clerk#000000497|0|uses haggle carefully. slyly even dependencies after the packages ha|
+964|76|O|131146.47|1995-05-20|3-MEDIUM|Clerk#000000657|0|print blithely ironic, careful theodolit|
+965|70|P|41758.44|1995-05-15|5-LOW|Clerk#000000218|0|iously special packages. slyly pending requests are carefully |
+966|14|O|120516.93|1998-04-30|2-HIGH|Clerk#000000239|0|special deposits. furious|
+967|110|F|179287.95|1992-06-21|3-MEDIUM|Clerk#000000167|0|excuses engage quickly bold dep|
+992|55|O|133665.12|1997-11-11|3-MEDIUM|Clerk#000000875|0|ts. regular pinto beans thrash carefully sl|
+993|80|O|198238.65|1995-09-10|3-MEDIUM|Clerk#000000894|0|quickly express accounts among the furiously bol|
+994|2|F|41433.48|1994-04-20|5-LOW|Clerk#000000497|0|ole. slyly bold excuses nag caref|
+995|116|P|135157.92|1995-05-31|3-MEDIUM|Clerk#000000439|0|deas. blithely final deposits play. express accounts wake blithely caref|
+996|71|O|47447.63|1997-12-29|1-URGENT|Clerk#000000497|0|arefully final packages into the slyly final requests affix blit|
+997|109|O|27561.82|1997-05-19|2-HIGH|Clerk#000000651|0|ly express depths. furiously final requests haggle furiously. carefu|
+998|32|F|65269.38|1994-11-26|4-NOT SPECIFIED|Clerk#000000956|0|ronic dolphins. ironic, bold ideas haggle furiously furious|
+999|61|F|145249.13|1993-09-05|5-LOW|Clerk#000000464|0|pitaphs sleep. regular accounts use. f|
+1024|4|O|176084.63|1997-12-23|5-LOW|Clerk#000000903|0| blithely. even, express theodolites cajole slyly across|
+1025|103|F|82034.03|1995-05-05|2-HIGH|Clerk#000000376|0|ross the slyly final pa|
+1026|73|O|36464.76|1997-06-04|5-LOW|Clerk#000000223|0|s wake blithely. special acco|
+1027|128|F|112770.89|1992-06-03|3-MEDIUM|Clerk#000000241|0|equests cajole. slyly final pinto bean|
+1028|70|F|153864.67|1994-01-01|2-HIGH|Clerk#000000131|0|ts are. final, silent deposits are among the fl|
+1029|130|F|47440.91|1994-06-21|2-HIGH|Clerk#000000700|0|quests sleep. slyly even foxes wake quickly final theodolites. clo|
+1030|134|F|16346.94|1994-06-15|5-LOW|Clerk#000000422|0|ully ironic accounts sleep carefully. requests are carefully alongside of the |
+1031|4|F|128024.71|1994-09-01|3-MEDIUM|Clerk#000000448|0|s; ironic theodolites along the carefully ex|
+1056|28|F|38446.39|1995-02-11|1-URGENT|Clerk#000000125|0|t, even deposits hang about the slyly special i|
+1057|76|F|108107.42|1992-02-20|1-URGENT|Clerk#000000124|0|cuses dazzle carefully careful, ironic pinto beans. carefully even theod|
+1058|53|F|89359.11|1993-04-26|3-MEDIUM|Clerk#000000373|0|kly pending courts haggle. blithely regular sheaves integrate carefully fi|
+1059|127|F|198360.22|1994-02-27|1-URGENT|Clerk#000000104|0|en accounts. carefully bold packages cajole daringly special depende|
+1060|140|F|121994.04|1993-02-21|3-MEDIUM|Clerk#000000989|0|l platelets sleep quickly slyly special requests. furiously |
+1061|103|O|166947.75|1998-05-15|5-LOW|Clerk#000000576|0|uests sleep at the packages. fur|
+1062|106|O|39805.04|1997-01-15|1-URGENT|Clerk#000000152|0|eposits use blithely |
+1063|37|F|41392.31|1994-04-02|2-HIGH|Clerk#000000024|0|deposits nag quickly regular deposits. quickl|
+1088|148|F|47120.41|1992-05-21|5-LOW|Clerk#000000347|0|counts are blithely. platelets print. carefully |
+1089|49|O|103192.74|1996-05-04|4-NOT SPECIFIED|Clerk#000000226|0|ns haggle ruthlessly. even requests are quickly abov|
+1090|19|O|32929.30|1997-11-15|2-HIGH|Clerk#000000300|0| furiously regular platelets haggle along the slyly unusual foxes! |
+1091|83|O|35795.22|1996-08-27|1-URGENT|Clerk#000000549|0| even pinto beans haggle quickly alongside of the eve|
+1092|124|P|85552.21|1995-03-04|3-MEDIUM|Clerk#000000006|0|re quickly along the blithe|
+1093|101|O|79189.58|1997-07-31|4-NOT SPECIFIED|Clerk#000000159|0| after the carefully ironic requests. carefully ironic packages wake fluffil|
+1094|145|O|9006.25|1997-12-24|3-MEDIUM|Clerk#000000570|0|beans affix furiously about the pending, even deposits. finally pendi|
+1095|145|O|178491.24|1995-08-22|3-MEDIUM|Clerk#000000709|0|sly bold requests cajole carefully according to|
+1120|140|O|107958.62|1997-11-07|3-MEDIUM|Clerk#000000319|0|lly special requests. slyly pending platelets are quickly pending requ|
+1121|29|O|241837.88|1997-01-13|3-MEDIUM|Clerk#000000541|0|r escapades. deposits above the fluffily bold requests hag|
+1122|121|O|179747.47|1997-01-10|1-URGENT|Clerk#000000083|0|uffily carefully final theodolites. furiously express packages affix|
+1123|73|O|93259.93|1996-08-03|3-MEDIUM|Clerk#000000929|0|uriously pending requests. slyly regular instruction|
+1124|80|O|141858.97|1998-07-30|5-LOW|Clerk#000000326|0|regular pinto beans along the fluffily silent packages|
+1125|25|F|80438.38|1994-10-27|2-HIGH|Clerk#000000510|0|ithely final requests. i|
+1126|145|O|59982.31|1998-01-28|4-NOT SPECIFIED|Clerk#000000928|0|d slyly regular ideas: special ideas believe slyly. slyly ironic sheaves w|
+1127|58|O|103320.91|1995-09-19|4-NOT SPECIFIED|Clerk#000000397|0|usly silent, regular pinto beans. blithely express requests boos|
+1152|49|F|51775.54|1994-08-14|4-NOT SPECIFIED|Clerk#000000496|0|equests. deposits ab|
+1153|121|O|220727.97|1996-04-18|5-LOW|Clerk#000000059|0| across the pending deposi|
+1154|37|F|192417.85|1992-02-15|1-URGENT|Clerk#000000268|0|old asymptotes are special requests. blithely even deposits sleep furiously|
+1155|149|O|126902.81|1997-10-06|2-HIGH|Clerk#000000164|0|c deposits haggle among the ironic, even requests. carefully ironic sheaves n|
+1156|133|O|217682.81|1996-10-19|1-URGENT|Clerk#000000200|0| blithely ironic dolphins. furiously pendi|
+1157|97|O|85394.06|1998-01-14|4-NOT SPECIFIED|Clerk#000000207|0|out the regular excuses boost carefully against the furio|
+1158|142|O|31075.51|1996-06-30|2-HIGH|Clerk#000000549|0|integrate slyly furiously ironic deposit|
+1159|70|F|55553.68|1992-09-18|3-MEDIUM|Clerk#000000992|0|ts may sleep. requests according to the|
+1184|89|O|39700.29|1997-10-26|5-LOW|Clerk#000000777|0|iously even packages haggle fluffily care|
+1185|74|F|47033.21|1992-08-24|5-LOW|Clerk#000000344|0| even escapades are. package|
+1186|59|O|82026.18|1996-08-15|4-NOT SPECIFIED|Clerk#000000798|0|ingly regular pinto beans: instructi|
+1187|134|F|85948.02|1992-11-20|3-MEDIUM|Clerk#000000047|0|s after the furiously final deposits boost slyly under the|
+1188|20|O|54655.07|1996-04-11|2-HIGH|Clerk#000000256|0|ully ironic deposits. slyl|
+1189|46|F|71017.99|1994-04-09|1-URGENT|Clerk#000000243|0|f the even accounts. courts print blithely ironic accounts. sile|
+1190|13|O|31043.39|1997-03-16|5-LOW|Clerk#000000575|0|ccounts above the foxes integrate carefully after the |
+1191|112|O|28623.04|1995-11-07|3-MEDIUM|Clerk#000000011|0|uests nag furiously. carefully even requests|
+1216|122|F|68056.57|1992-12-07|5-LOW|Clerk#000000918|0|nal foxes around the e|
+1217|7|F|40982.08|1992-04-26|4-NOT SPECIFIED|Clerk#000000538|0| foxes nag quickly. ironic excuses nod. blithely pending|
+1218|10|F|99834.47|1994-06-20|4-NOT SPECIFIED|Clerk#000000994|0|s cajole. special, silent deposits about the theo|
+1219|28|O|10163.56|1995-10-05|3-MEDIUM|Clerk#000000800|0|od carefully. slyly final dependencies across the even fray|
+1220|49|O|122157.14|1996-08-29|1-URGENT|Clerk#000000712|0|inal theodolites wake. fluffily ironic asymptotes cajol|
+1221|14|F|117397.16|1992-04-19|4-NOT SPECIFIED|Clerk#000000852|0| detect against the silent, even deposits. carefully ironic|
+1222|10|F|47623.94|1993-02-05|3-MEDIUM|Clerk#000000811|0|theodolites use quickly even accounts. carefully final asympto|
+1223|10|O|26714.67|1996-05-25|4-NOT SPECIFIED|Clerk#000000238|0|posits was blithely fr|
+1248|49|F|210713.88|1992-01-02|1-URGENT|Clerk#000000890|0|t the carefully regular dugouts. s|
+1249|149|F|45889.09|1994-01-05|1-URGENT|Clerk#000000095|0|al ideas sleep above the pending pin|
+1250|37|F|12907.62|1992-09-29|4-NOT SPECIFIED|Clerk#000000652|0|ts after the fluffily pending instructions use slyly about the s|
+1251|38|O|109536.55|1997-10-30|1-URGENT|Clerk#000000276|0|, brave sauternes. deposits boost fluffily.|
+1252|149|O|93403.05|1997-08-04|5-LOW|Clerk#000000348|0|ng the slyly regular excuses. special courts nag furiously blithely e|
+1253|115|F|92730.74|1993-01-26|1-URGENT|Clerk#000000775|0| requests sleep furiously even foxes. ruthless packag|
+1254|70|O|94649.25|1995-12-22|1-URGENT|Clerk#000000607|0| pinto beans. carefully regular request|
+1255|122|F|62518.31|1994-05-30|4-NOT SPECIFIED|Clerk#000000798|0|ct slyly regular accounts. quick|
+1280|97|F|91664.85|1993-01-11|5-LOW|Clerk#000000160|0|posits thrash quickly after the theodolites. furiously iro|
+1281|62|F|165454.51|1994-12-11|1-URGENT|Clerk#000000430|0|counts. carefully pending accounts eat |
+1282|116|F|61297.42|1992-02-29|4-NOT SPECIFIED|Clerk#000000168|0|he quickly special packages. furiously final re|
+1283|118|O|202623.92|1996-08-30|4-NOT SPECIFIED|Clerk#000000260|0| pinto beans boost slyly ac|
+1284|134|O|106122.38|1996-01-07|2-HIGH|Clerk#000000492|0|s. blithely silent deposits s|
+1285|11|F|139124.72|1992-06-01|1-URGENT|Clerk#000000423|0|cial deposits cajole after the ironic requests. p|
+1286|109|F|207291.83|1993-05-14|4-NOT SPECIFIED|Clerk#000000939|0| deposits use carefully from the excuses. slyly bold p|
+1287|19|F|131432.42|1994-07-05|2-HIGH|Clerk#000000288|0|ly ironic dolphins integrate furiously among the final packages. st|
+1312|112|F|58111.00|1994-05-19|3-MEDIUM|Clerk#000000538|0|n, express accounts across the ironic|
+1313|148|F|46598.65|1994-09-13|1-URGENT|Clerk#000000774|0|ld accounts. regular deposits cajole. ironically pending theodolites use car|
+1314|143|F|56207.66|1994-05-13|3-MEDIUM|Clerk#000000485|0|ickly blithe packages nod ideas. furiously bold braids boost around the car|
+1315|22|O|121935.23|1998-03-22|5-LOW|Clerk#000000840|0|final theodolites alongside of the carefu|
+1316|16|F|163746.47|1993-12-03|1-URGENT|Clerk#000000857|0|ully bold theodolites? pending, bold pin|
+1317|100|P|139714.71|1995-05-19|2-HIGH|Clerk#000000373|0|sts. furiously special deposits lose fur|
+1318|128|O|81663.65|1998-06-27|3-MEDIUM|Clerk#000000581|0|s hang bold requests. pending, re|
+1319|32|O|31103.83|1996-09-27|2-HIGH|Clerk#000000257|0|y across the ruthlessly ironic accounts. unusu|
+1344|17|F|43809.37|1992-04-16|5-LOW|Clerk#000000178|0|omise close, silent requests. pending theodolites boost pending |
+1345|95|F|111207.93|1992-10-28|5-LOW|Clerk#000000447|0| regular tithes. quickly fluffy de|
+1346|76|F|171975.62|1992-06-18|2-HIGH|Clerk#000000374|0|ges sleep quickly-- even pint|
+1347|41|O|173444.60|1997-06-20|5-LOW|Clerk#000000977|0|he furiously even foxes use carefully express req|
+1348|19|O|94135.77|1998-04-18|5-LOW|Clerk#000000206|0|tly. quickly even deposi|
+1349|64|O|46376.09|1997-10-26|1-URGENT|Clerk#000000543|0|yly! blithely special theodolites cajole. unusual, reg|
+1350|52|F|49305.98|1993-08-24|1-URGENT|Clerk#000000635|0|iously about the blithely special a|
+1351|106|O|24637.96|1998-04-20|1-URGENT|Clerk#000000012|0| cajole. regular, special re|
+1376|47|O|23984.88|1997-05-04|4-NOT SPECIFIED|Clerk#000000730|0|der furiously final, final frets. carefull|
+1377|20|O|108334.30|1998-04-24|4-NOT SPECIFIED|Clerk#000000625|0|lly across the blithely express accounts. ironic excuses promise carefully de|
+1378|20|O|118495.12|1996-03-09|4-NOT SPECIFIED|Clerk#000000705|0| furiously even tithes cajole slyly among the quick|
+1379|65|O|84627.76|1998-05-25|5-LOW|Clerk#000000861|0|y deposits are caref|
+1380|137|O|94969.41|1996-07-07|3-MEDIUM|Clerk#000000969|0|inal deposits wake slyly daringly even requests. bold, even foxe|
+1381|127|O|58212.22|1998-05-25|3-MEDIUM|Clerk#000000107|0|even requests breach after the bold, ironic instructions. slyly even|
+1382|133|F|173522.71|1993-08-17|5-LOW|Clerk#000000241|0|fully final packages sl|
+1383|121|F|34797.72|1993-04-27|2-HIGH|Clerk#000000785|0|ts. express requests sleep blithel|
+1408|55|O|183965.61|1997-12-26|4-NOT SPECIFIED|Clerk#000000942|0|t the quickly final asymptotes. unusual|
+1409|143|F|72440.52|1992-12-31|4-NOT SPECIFIED|Clerk#000000065|0|ructions. furiously unusual excuses are regular, unusual theodolites. fin|
+1410|113|O|114879.19|1997-04-12|5-LOW|Clerk#000000123|0|iously along the bravely regular dolphins. pinto beans cajole furiously sp|
+1411|95|F|164462.61|1994-12-21|2-HIGH|Clerk#000000566|0|s. furiously special excuses across the pending pinto beans haggle sp|
+1412|53|F|78676.54|1993-03-13|4-NOT SPECIFIED|Clerk#000000083|0|uffily daring theodolit|
+1413|91|O|75733.58|1997-06-14|3-MEDIUM|Clerk#000000342|0|, ironic instructions. carefully even packages dazzle|
+1414|77|O|38057.81|1995-08-16|1-URGENT|Clerk#000000883|0|ccounts. ironic foxes haggle car|
+1415|79|F|24654.79|1994-05-29|4-NOT SPECIFIED|Clerk#000000601|0|rays. blithely final ideas affix quickl|
+1440|98|O|50201.16|1995-08-10|5-LOW|Clerk#000000956|0| pending requests. closely s|
+1441|122|O|156477.94|1997-03-06|4-NOT SPECIFIED|Clerk#000000156|0|ter the excuses. ironic dependencies m|
+1442|112|F|7108.12|1994-07-05|4-NOT SPECIFIED|Clerk#000000935|0|nal pinto beans. slyly ironic ideas cajol|
+1443|44|O|44672.03|1996-12-16|5-LOW|Clerk#000000185|0|x blithely against the carefully final somas. even asymptotes are. quickly spe|
+1444|134|F|207907.60|1994-12-06|3-MEDIUM|Clerk#000000783|0|ove the bold accounts cajole fluffily about|
+1445|115|F|154653.32|1995-01-10|3-MEDIUM|Clerk#000000211|0|even packages wake fluffily |
+1446|41|O|27663.16|1998-02-16|5-LOW|Clerk#000000274|0|lly regular notornis above the requests sleep final accounts! |
+1447|91|F|108171.38|1992-10-15|2-HIGH|Clerk#000000880|0|inly against the blithely pending excuses. regular, pe|
+1472|149|O|65331.05|1996-10-06|5-LOW|Clerk#000000303|0|y special dolphins around the final dependencies wake quick|
+1473|94|O|80624.38|1997-03-17|3-MEDIUM|Clerk#000000960|0|furiously close accoun|
+1474|70|F|51697.18|1995-01-09|1-URGENT|Clerk#000000438|0|detect quickly above the carefully even |
+1475|5|O|185496.66|1997-11-12|2-HIGH|Clerk#000000972|0|cally final packages boost. blithely ironic packa|
+1476|145|O|18795.62|1996-06-27|2-HIGH|Clerk#000000673|0|ding accounts hinder alongside of the quickly pending requests. fluf|
+1477|76|O|231831.35|1997-08-24|5-LOW|Clerk#000000612|0|ly bold foxes. final ideas would cajo|
+1478|50|O|20791.50|1997-08-03|2-HIGH|Clerk#000000827|0|lessly. carefully express|
+1479|16|O|31471.04|1995-12-16|4-NOT SPECIFIED|Clerk#000000697|0|he furiously even foxes. thinly bold deposits|
+1504|2|F|89399.40|1992-08-28|3-MEDIUM|Clerk#000000381|0|, brave deposits. bold de|
+1505|37|F|55892.35|1992-08-21|2-HIGH|Clerk#000000544|0|s. slyly ironic packages cajole. carefully regular packages haggle |
+1506|148|F|195844.84|1992-09-21|3-MEDIUM|Clerk#000000620|0| dependencies. accounts affix blithely slowly unusual deposits. slyly regular |
+1507|121|F|96166.92|1993-10-14|3-MEDIUM|Clerk#000000305|0|stealthy, ironic de|
+1508|103|O|151282.65|1998-04-10|5-LOW|Clerk#000000117|0| after the furiously regular pinto beans hang slyly quickly ironi|
+1509|64|F|180455.98|1993-07-08|5-LOW|Clerk#000000770|0|the regular ideas. regul|
+1510|53|O|154590.05|1996-09-17|5-LOW|Clerk#000000128|0|ld carefully. furiously final asymptotes haggle furiously|
+1511|79|O|59651.38|1996-12-22|4-NOT SPECIFIED|Clerk#000000386|0|ts above the depend|
+1536|94|O|5184.26|1997-01-26|3-MEDIUM|Clerk#000000117|0|ges are! furiously final deposits cajole iron|
+1537|109|F|108317.51|1992-02-15|4-NOT SPECIFIED|Clerk#000000862|0|g to the even deposits. ironic, final packages |
+1538|29|O|179554.41|1995-06-18|4-NOT SPECIFIED|Clerk#000000258|0| instructions. regular theod|
+1539|112|F|39612.63|1995-03-10|5-LOW|Clerk#000000840|0|nstructions boost pa|
+1540|16|F|128014.15|1992-08-05|2-HIGH|Clerk#000000927|0|r ideas hinder blithe|
+1541|94|P|47286.32|1995-05-18|1-URGENT|Clerk#000000906|0|y. slyly ironic warhorses around the furiously regul|
+1542|143|F|132972.24|1993-09-15|3-MEDIUM|Clerk#000000435|0|t the furiously close deposits do was f|
+1543|52|O|139047.22|1997-02-20|1-URGENT|Clerk#000000398|0|unts. furiously pend|
+1568|17|O|76119.72|1997-01-30|4-NOT SPECIFIED|Clerk#000000554|0|d notornis. carefully |
+1569|104|O|87803.55|1998-04-02|5-LOW|Clerk#000000786|0|orbits. fluffily even decoys serve blithely. furiously furious realms nag acro|
+1570|124|O|35589.57|1998-03-16|1-URGENT|Clerk#000000745|0|pinto beans haggle furiousl|
+1571|103|F|151404.78|1992-12-05|2-HIGH|Clerk#000000565|0|ously furiously bold warthogs. slyly ironic instructions are quickly a|
+1572|11|O|47232.79|1996-02-24|2-HIGH|Clerk#000000994|0|fluffily ironic accounts haggle blithely final platelets! slyly regular foxes|
+1573|148|F|86918.57|1992-12-28|2-HIGH|Clerk#000000940|0|ess, ironic deposits use along the carefu|
+1574|134|O|179923.54|1996-12-12|3-MEDIUM|Clerk#000000809|0| ideas hinder after the carefully unusual |
+1575|145|O|197031.52|1995-09-13|3-MEDIUM|Clerk#000000497|0|. furiously regular dep|
+1600|94|F|130515.61|1993-03-03|3-MEDIUM|Clerk#000000627|0|tions cajole quietly above the regular, silent requests. slyly fin|
+1601|53|F|73962.95|1994-08-27|5-LOW|Clerk#000000469|0|ent deposits are ca|
+1602|1|F|4225.26|1993-08-05|5-LOW|Clerk#000000660|0|deposits. busily silent instructions haggle furiously. fin|
+1603|2|F|29305.47|1993-07-31|4-NOT SPECIFIED|Clerk#000000869|0|s. slyly silent deposits boo|
+1604|113|F|107139.29|1993-07-17|5-LOW|Clerk#000000512|0|lithely silent waters. blithely unusual packages alongside |
+1605|58|O|130687.64|1998-04-24|4-NOT SPECIFIED|Clerk#000000616|0|sleep furiously? ruthless, even pinto beans |
+1606|53|O|115877.40|1997-04-17|4-NOT SPECIFIED|Clerk#000000550|0|r requests. quickly even platelets breach before the ironically|
+1607|149|O|166335.03|1995-12-16|2-HIGH|Clerk#000000498|0| bold, pending foxes haggle. slyly silent |
+1632|67|O|183286.33|1997-01-08|3-MEDIUM|Clerk#000000351|0|onic requests are accounts. bold a|
+1633|16|O|52359.51|1995-10-14|2-HIGH|Clerk#000000666|0|y silent accounts sl|
+1634|70|O|145898.47|1996-09-10|1-URGENT|Clerk#000000360|0|arefully blithely ironic requests. slyly unusual instructions alongside|
+1635|4|O|70232.26|1997-02-13|3-MEDIUM|Clerk#000000958|0|s. slyly ironic requests affix slyly |
+1636|79|O|172021.87|1997-06-17|3-MEDIUM|Clerk#000000457|0|ding requests. slyly ironic courts wake quickl|
+1637|73|F|180912.15|1995-02-08|4-NOT SPECIFIED|Clerk#000000189|0| final accounts. blithely silent ideas cajole bravely. carefully express |
+1638|139|O|172436.30|1997-08-13|2-HIGH|Clerk#000000643|0|he fluffily regular asymp|
+1639|5|O|104166.56|1995-08-20|4-NOT SPECIFIED|Clerk#000000939|0|haggle furiously. final requests detect furious|
+1664|64|O|178060.22|1996-03-03|1-URGENT|Clerk#000000090|0|y quickly even asymptotes. furiously regular packages haggle quickly fin|
+1665|76|F|4819.91|1994-05-08|2-HIGH|Clerk#000000920|0|ly regular packages are fluffily even ideas. fluffily final|
+1666|95|O|128367.97|1995-10-18|1-URGENT|Clerk#000000849|0|ffily pending dependencies wake fluffily. pending, final accounts |
+1667|5|O|125030.37|1997-10-10|2-HIGH|Clerk#000000103|0|e accounts. slyly express accounts must are a|
+1668|142|O|137576.19|1997-07-12|4-NOT SPECIFIED|Clerk#000000148|0|eodolites. carefully dogged dolphins haggle q|
+1669|2|O|24362.39|1997-06-09|3-MEDIUM|Clerk#000000663|0|er ironic requests detect furiously blithely sp|
+1670|25|O|89999.72|1997-05-24|2-HIGH|Clerk#000000320|0|unusual dependencies. furiously special platelets main|
+1671|35|O|104391.11|1996-07-27|4-NOT SPECIFIED|Clerk#000000275|0|ly. slyly pending requests was above the |
+1696|4|O|102665.03|1998-01-08|4-NOT SPECIFIED|Clerk#000000041|0|bravely bold accounts above the quickly bold|
+1697|76|O|122621.31|1996-10-07|1-URGENT|Clerk#000000815|0|o x-ray blithely. pl|
+1698|40|O|141118.87|1997-04-23|2-HIGH|Clerk#000000432|0|slyly. carefully express deposit|
+1699|85|F|66408.29|1993-12-30|1-URGENT|Clerk#000000125|0|jole blithely. furiously un|
+1700|65|O|89143.36|1996-06-15|3-MEDIUM|Clerk#000000328|0|ely final dolphins wake sometimes above the quietly regular deposits. fur|
+1701|130|F|72835.95|1992-05-19|2-HIGH|Clerk#000000395|0|furiously. regular, close theodoli|
+1702|67|P|194119.31|1995-05-07|2-HIGH|Clerk#000000300|0|around the carefully final deposits cajole carefully according to the b|
+1703|134|F|121220.59|1993-01-28|3-MEDIUM|Clerk#000000463|0| pinto beans poach. bold courts boost. regular, express deposits at|
+1728|64|O|131604.34|1996-05-22|2-HIGH|Clerk#000000711|0|beans. slyly regular instructions sleep! slyly final packages|
+1729|133|F|12137.76|1992-05-19|2-HIGH|Clerk#000000158|0|pending foxes wake. accounts|
+1730|124|O|150886.49|1998-07-24|5-LOW|Clerk#000000794|0| fluffily pending deposits serve. furiously even requests wake furiou|
+1731|128|O|190490.78|1996-01-06|1-URGENT|Clerk#000000268|0|lithely regular, final instructions. ironic, express packages are above|
+1732|146|F|179854.51|1993-11-29|5-LOW|Clerk#000000903|0|inal requests integrate dolph|
+1733|148|O|165489.52|1996-05-12|2-HIGH|Clerk#000000789|0|e carefully according to the accounts. furiously pending instructions sleep|
+1734|7|F|44002.53|1994-06-11|2-HIGH|Clerk#000000722|0| final ideas haggle. blithely quick foxes sleep busily bold ideas. i|
+1735|22|F|98541.95|1992-12-27|1-URGENT|Clerk#000000458|0|ully idle requests wake qu|
+1760|115|O|82151.12|1996-05-17|5-LOW|Clerk#000000917|0| deposits. busily regular deposits wake blithely along the furiously even re|
+1761|106|F|211925.95|1993-12-24|2-HIGH|Clerk#000000817|0|efully slyly bold frets. packages boost b|
+1762|77|F|202227.17|1994-08-20|4-NOT SPECIFIED|Clerk#000000653|0|ly ironic packages. furi|
+1763|121|O|140685.01|1996-10-29|2-HIGH|Clerk#000000321|0|es. bold dependencies haggle furiously along |
+1764|29|F|47384.71|1992-03-25|1-URGENT|Clerk#000000182|0|. slyly final packages integrate carefully acro|
+1765|73|O|36551.43|1995-12-03|4-NOT SPECIFIED|Clerk#000000490|0| regular excuses wake slyly|
+1766|139|O|41032.81|1996-10-12|2-HIGH|Clerk#000000983|0|unusual deposits affix quickly beyond the carefully s|
+1767|25|P|136582.60|1995-03-14|2-HIGH|Clerk#000000327|0|eposits use carefully carefully regular platelets. quickly regular packages al|
+1792|49|F|107919.86|1993-11-09|5-LOW|Clerk#000000102|0|ructions haggle along the pending packages. carefully speci|
+1793|19|F|82504.56|1992-07-12|4-NOT SPECIFIED|Clerk#000000291|0|regular packages cajole. blithely special packages according to the final d|
+1794|140|O|179462.21|1997-09-28|1-URGENT|Clerk#000000686|0|ally silent pinto beans. regular package|
+1795|94|F|146849.33|1994-03-19|2-HIGH|Clerk#000000815|0| quickly final packages! blithely dogged accounts c|
+1796|47|F|33755.47|1992-11-21|2-HIGH|Clerk#000000245|0|eans use furiously around th|
+1797|125|O|51494.47|1996-05-07|3-MEDIUM|Clerk#000000508|0|quiet platelets haggle since the quickly ironic instructi|
+1798|52|O|46393.97|1997-07-28|1-URGENT|Clerk#000000741|0|al foxes are blithe|
+1799|61|F|46815.93|1994-03-07|4-NOT SPECIFIED|Clerk#000000339|0|ns sleep furiously final waters. blithely regular instructions h|
+1824|49|F|81351.53|1994-05-05|1-URGENT|Clerk#000000972|0|e blithely fluffily|
+1825|148|F|150582.77|1993-12-05|3-MEDIUM|Clerk#000000345|0|ironic, final accou|
+1826|82|F|124719.97|1992-04-16|4-NOT SPECIFIED|Clerk#000000718|0|the even asymptotes dazzle fluffily slyly regular asymptotes. final, unu|
+1827|106|O|210113.88|1996-06-22|4-NOT SPECIFIED|Clerk#000000369|0|luffily even requests haggle sly|
+1828|32|F|137369.50|1994-04-18|3-MEDIUM|Clerk#000000840|0|y quickly bold packag|
+1829|112|F|127532.20|1994-05-08|2-HIGH|Clerk#000000537|0| accounts wake above the furiously unusual requests. pending package|
+1830|133|F|85122.24|1995-02-23|1-URGENT|Clerk#000000045|0|according to the even,|
+1831|71|F|58032.77|1993-12-02|1-URGENT|Clerk#000000854|0| accounts. carefully even accounts boost furiously. regular ideas engage. |
+1856|106|F|189361.42|1992-03-20|4-NOT SPECIFIED|Clerk#000000952|0|. special pinto beans run acr|
+1857|133|F|102793.59|1993-01-13|2-HIGH|Clerk#000000083|0|hely final ideas slee|
+1858|143|O|30457.91|1997-12-13|1-URGENT|Clerk#000000389|0|thely. slyly final deposits sleep|
+1859|61|O|105094.09|1997-04-11|4-NOT SPECIFIED|Clerk#000000949|0| the foxes. bravely special excuses nag carefully special r|
+1860|10|O|9103.40|1996-04-04|3-MEDIUM|Clerk#000000556|0|osits. quickly bold deposits according to |
+1861|70|F|95063.41|1994-01-03|3-MEDIUM|Clerk#000000847|0|r the fluffily close sauternes. furio|
+1862|34|O|97981.06|1998-02-24|5-LOW|Clerk#000000348|0|ts snooze ironically abou|
+1863|74|F|96359.65|1993-09-23|4-NOT SPECIFIED|Clerk#000000658|0|old sentiments. careful, |
+1888|121|F|224724.11|1993-10-31|4-NOT SPECIFIED|Clerk#000000659|0|olites. pinto beans cajole. regular deposits affix. slyly regular|
+1889|25|O|96431.77|1997-03-16|1-URGENT|Clerk#000000854|0|p around the regular notornis. unusual deposits|
+1890|10|O|202364.58|1996-12-18|4-NOT SPECIFIED|Clerk#000000627|0|romise final, regular deposits. regular fox|
+1891|61|F|76848.96|1994-12-15|5-LOW|Clerk#000000495|0|unusual foxes sleep regular deposits. requests wake special pac|
+1892|25|F|133273.64|1994-03-26|5-LOW|Clerk#000000733|0|sts. slyly regular dependencies use slyly. ironic, spec|
+1893|125|O|116792.13|1997-10-30|2-HIGH|Clerk#000000111|0|olites. silent, special deposits eat slyly quickly express packages; hockey p|
+1894|76|F|44387.23|1992-03-30|1-URGENT|Clerk#000000626|0|e furiously. furiously even accounts are slyly final accounts. closely speci|
+1895|7|F|44429.81|1994-05-30|3-MEDIUM|Clerk#000000878|0|ress accounts. bold accounts cajole. slyly final pinto beans poach regul|
+1920|110|O|119605.91|1998-06-24|5-LOW|Clerk#000000018|0|hely; furiously regular excuses|
+1921|88|F|57584.12|1994-01-18|3-MEDIUM|Clerk#000000293|0|counts. slyly quiet requests along the ruthlessly regular accounts are |
+1922|56|O|11575.77|1996-07-13|3-MEDIUM|Clerk#000000984|0|side of the blithely final re|
+1923|136|O|171128.10|1997-07-07|1-URGENT|Clerk#000000471|0| express dolphins. |
+1924|76|O|169756.19|1996-09-07|4-NOT SPECIFIED|Clerk#000000823|0| of the ironic accounts. instructions near the final instr|
+1925|17|F|146382.71|1992-03-05|1-URGENT|Clerk#000000986|0|e slyly regular deposits. furiously |
+1926|94|O|100035.03|1996-01-31|2-HIGH|Clerk#000000568|0|cajole. even warhorses sleep carefully. |
+1927|140|O|23327.88|1995-09-30|3-MEDIUM|Clerk#000000616|0|riously special packages. permanent pearls wake furiously. even packages alo|
+1952|67|F|12896.25|1994-03-16|2-HIGH|Clerk#000000254|0| silent accounts boost |
+1953|149|F|57213.18|1993-11-30|3-MEDIUM|Clerk#000000891|0| fluffily along the quickly even packages. |
+1954|56|O|158853.63|1997-05-31|4-NOT SPECIFIED|Clerk#000000104|0| unusual excuses cajole according to the blithely regular theodolites.|
+1955|13|F|103085.13|1992-04-20|1-URGENT|Clerk#000000792|0|ly special ideas. sometimes final |
+1956|127|F|88704.26|1992-09-20|4-NOT SPECIFIED|Clerk#000000600|0|ironic ideas are silent ideas. furiously final deposits sleep slyly carefu|
+1957|31|O|77482.87|1998-07-21|2-HIGH|Clerk#000000639|0|nding excuses about the |
+1958|53|O|176294.34|1995-09-22|5-LOW|Clerk#000000343|0| haggle blithely. flu|
+1959|43|O|62277.18|1997-01-13|4-NOT SPECIFIED|Clerk#000000631|0| cajole about the blithely express requests. even excuses mold bl|
+1984|52|O|79230.47|1998-04-01|1-URGENT|Clerk#000000416|0| slyly special instructions. unusual foxes use packages. carefully regular req|
+1985|7|F|171522.54|1994-09-02|4-NOT SPECIFIED|Clerk#000000741|0|slyly slyly even pains. slyly reg|
+1986|149|F|34269.96|1994-05-05|2-HIGH|Clerk#000000609|0|across the theodolites. quick|
+1987|100|F|6406.29|1994-04-30|2-HIGH|Clerk#000000652|0|gular platelets alongside |
+1988|109|O|117132.72|1995-10-06|4-NOT SPECIFIED|Clerk#000000011|0|ly ironic dolphins serve quickly busy accounts. bu|
+1989|118|F|39263.28|1994-03-16|4-NOT SPECIFIED|Clerk#000000747|0|ely bold pinto beans ha|
+1990|119|F|48781.39|1994-12-16|2-HIGH|Clerk#000000114|0|e bold patterns. always regul|
+1991|19|F|139854.41|1992-09-07|4-NOT SPECIFIED|Clerk#000000854|0|ing accounts can haggle at the carefully final Tiresias-- pending, regular|
+2016|8|O|24347.36|1996-08-16|3-MEDIUM|Clerk#000000641|0|the carefully ironic foxes. requests nag bold, r|
+2017|101|O|70529.27|1998-05-13|3-MEDIUM|Clerk#000000427|0|nusual requests. blit|
+2018|19|P|25007.95|1995-04-05|4-NOT SPECIFIED|Clerk#000000920|0|gular accounts wake fur|
+2019|136|F|43789.14|1992-10-23|1-URGENT|Clerk#000000565|0| furiously bold packages. fluffily fi|
+2020|73|F|136162.13|1993-06-21|3-MEDIUM|Clerk#000000192|0|es. furiously regular packages above the furiously special theodolites are a|
+2021|70|O|27016.74|1995-07-15|1-URGENT|Clerk#000000155|0|ong the furiously regular requests. unusual deposits wake fluffily inside|
+2022|62|F|206742.11|1992-03-15|1-URGENT|Clerk#000000268|0| dependencies sleep fluffily even, ironic deposits. express, silen|
+2023|118|F|144123.37|1992-05-06|5-LOW|Clerk#000000137|0|ular courts engage according to the|
+2048|17|F|33401.77|1993-11-15|1-URGENT|Clerk#000000934|0|s cajole after the blithely final accounts. f|
+2049|31|O|153048.74|1995-12-07|2-HIGH|Clerk#000000859|0|ly regular requests thrash blithely about the fluffily even theodolites. r|
+2050|28|F|208517.98|1994-06-02|4-NOT SPECIFIED|Clerk#000000821|0|d accounts against the furiously regular packages use bli|
+2051|40|O|87988.34|1996-03-18|4-NOT SPECIFIED|Clerk#000000333|0|ctions sleep blithely. blithely regu|
+2052|91|F|141822.19|1992-04-13|2-HIGH|Clerk#000000767|0| requests sleep around the even, even courts. ironic theodolites affix furious|
+2053|142|F|125125.57|1995-02-07|1-URGENT|Clerk#000000717|0|ar requests: blithely sly accounts boost carefully across t|
+2054|41|F|144335.16|1992-06-08|4-NOT SPECIFIED|Clerk#000000103|0|l requests affix carefully about the furiously special|
+2055|97|F|57092.26|1993-09-04|1-URGENT|Clerk#000000067|0|. warhorses affix slyly blithely express instructions? fur|
+2080|95|F|45767.69|1993-06-18|5-LOW|Clerk#000000190|0|ironic, pending theodolites are carefully about the quickly regular theodolite|
+2081|121|O|145654.97|1997-07-05|2-HIGH|Clerk#000000136|0|ong the regular theo|
+2082|49|F|46753.63|1995-01-10|2-HIGH|Clerk#000000354|0|cial accounts. ironic, express dolphins nod slyly sometimes final reques|
+2083|101|F|31795.52|1993-07-14|3-MEDIUM|Clerk#000000361|0|al patterns. bold, final foxes nag bravely about the furiously express|
+2084|80|F|190652.53|1993-03-17|2-HIGH|Clerk#000000048|0|zle furiously final, careful packages. slyly ironic ideas amo|
+2085|49|F|45311.07|1993-11-21|3-MEDIUM|Clerk#000000818|0|ress, express ideas haggle|
+2086|142|F|188985.18|1994-10-19|1-URGENT|Clerk#000000046|0| permanently regular|
+2087|50|O|53581.41|1998-01-31|2-HIGH|Clerk#000000626|0|e always regular packages nod against the furiously spec|
+2112|64|O|17986.15|1997-02-05|2-HIGH|Clerk#000000351|0|against the slyly even id|
+2113|32|O|65678.21|1997-11-08|2-HIGH|Clerk#000000527|0|slyly regular instruct|
+2114|79|F|106446.02|1995-01-16|5-LOW|Clerk#000000751|0|r, unusual accounts haggle across the busy platelets. carefully |
+2115|106|O|134814.65|1998-05-23|4-NOT SPECIFIED|Clerk#000000101|0|odolites boost. carefully regular excuses cajole. quickly ironic pinto be|
+2116|23|F|60887.90|1994-08-26|1-URGENT|Clerk#000000197|0|efully after the asymptotes. furiously sp|
+2117|22|O|145713.03|1997-04-26|2-HIGH|Clerk#000000887|0|ely even dependencies. regular foxes use blithely.|
+2118|134|O|38974.67|1996-10-09|1-URGENT|Clerk#000000196|0|ial requests wake carefully special packages. f|
+2119|64|O|34632.57|1996-08-20|2-HIGH|Clerk#000000434|0|uickly pending escapades. fluffily ir|
+2144|136|F|119917.28|1994-03-29|3-MEDIUM|Clerk#000000546|0|t. carefully quick requests across the deposits wake regu|
+2145|134|F|18885.35|1992-10-03|1-URGENT|Clerk#000000886|0|sts would snooze blithely alongside of th|
+2146|118|F|179686.07|1992-09-14|4-NOT SPECIFIED|Clerk#000000476|0|ven packages. dependencies wake slyl|
+2147|100|F|91513.79|1992-09-06|4-NOT SPECIFIED|Clerk#000000424|0| haggle carefully furiously final foxes. pending escapades thrash. bold theod|
+2148|130|F|19612.03|1995-04-19|4-NOT SPECIFIED|Clerk#000000517|0|ross the furiously unusual theodolites. always expre|
+2149|101|F|105145.40|1993-03-13|5-LOW|Clerk#000000555|0|nusual accounts nag furiously special reques|
+2150|82|F|166961.06|1994-06-03|3-MEDIUM|Clerk#000000154|0|ect slyly against the even, final packages. quickly regular pinto beans wake c|
+2151|58|O|124608.69|1996-11-11|3-MEDIUM|Clerk#000000996|0|c requests. ironic platelets cajole across the quickly fluffy deposits.|
+2176|104|F|87248.17|1992-11-10|1-URGENT|Clerk#000000195|0|s haggle regularly accor|
+2177|136|O|183493.42|1997-01-20|3-MEDIUM|Clerk#000000161|0|ove the blithely unusual packages cajole carefully fluffily special request|
+2178|8|O|79594.68|1996-12-12|3-MEDIUM|Clerk#000000656|0|thely according to the instructions. furious|
+2179|41|O|77487.09|1996-09-07|2-HIGH|Clerk#000000935|0|ounts alongside of the furiously unusual braids cajol|
+2180|76|O|208481.57|1996-09-14|4-NOT SPECIFIED|Clerk#000000650|0|xpress, unusual pains. furiously ironic excu|
+2181|76|O|100954.64|1995-09-13|3-MEDIUM|Clerk#000000814|0|y against the ironic, even|
+2182|23|F|116003.11|1994-04-05|2-HIGH|Clerk#000000071|0|ccounts. quickly bold deposits across the excuses sl|
+2183|113|O|49841.12|1996-06-22|1-URGENT|Clerk#000000287|0| among the express, ironic packages. slyly ironic platelets integrat|
+2208|68|P|245388.06|1995-05-01|4-NOT SPECIFIED|Clerk#000000900|0|symptotes wake slyly blithely unusual packages.|
+2209|91|F|129086.93|1992-07-10|2-HIGH|Clerk#000000056|0|er above the slyly silent requests. furiously reg|
+2210|32|F|31689.46|1992-01-16|2-HIGH|Clerk#000000941|0| believe carefully quickly express pinto beans. deposi|
+2211|92|F|140031.23|1994-06-30|2-HIGH|Clerk#000000464|0|ffily bold courts e|
+2212|118|F|17231.05|1994-03-23|3-MEDIUM|Clerk#000000954|0|structions above the unusual requests use fur|
+2213|122|F|146136.10|1993-01-15|4-NOT SPECIFIED|Clerk#000000598|0|osits are carefully reg|
+2214|115|O|150345.63|1998-05-05|3-MEDIUM|Clerk#000000253|0|packages. fluffily even accounts haggle blithely. carefully ironic depen|
+2215|40|O|108239.46|1996-06-16|4-NOT SPECIFIED|Clerk#000000817|0|le final, final foxes. quickly regular gifts are carefully deposit|
+2240|56|F|174090.30|1992-03-06|4-NOT SPECIFIED|Clerk#000000622|0|accounts against the slyly express foxes are after the slyly regular |
+2241|103|F|165219.08|1993-05-11|1-URGENT|Clerk#000000081|0|y about the silent excuses. furiously ironic instructions along the sil|
+2242|82|O|15082.82|1997-07-20|4-NOT SPECIFIED|Clerk#000000360|0| pending multipliers. carefully express asymptotes use quickl|
+2243|49|O|10451.97|1995-06-10|2-HIGH|Clerk#000000813|0|ously regular deposits integrate s|
+2244|127|F|21207.08|1993-01-09|1-URGENT|Clerk#000001000|0|ckages. ironic, ironic accounts haggle blithely express excuses. |
+2245|58|F|150585.73|1993-04-28|3-MEDIUM|Clerk#000000528|0|ake carefully. braids haggle slyly quickly b|
+2246|113|O|85755.84|1996-05-27|4-NOT SPECIFIED|Clerk#000000739|0| final gifts sleep |
+2247|95|F|13491.31|1992-08-02|4-NOT SPECIFIED|Clerk#000000947|0|furiously regular packages. final brai|
+2272|139|F|127934.71|1993-04-13|2-HIGH|Clerk#000000449|0|s. bold, ironic pinto beans wake. silently specia|
+2273|136|O|142291.79|1996-12-14|5-LOW|Clerk#000000155|0|uickly express foxes haggle quickly against|
+2274|104|F|58273.89|1993-09-04|4-NOT SPECIFIED|Clerk#000000258|0|nstructions try to hag|
+2275|149|F|37398.90|1992-10-22|4-NOT SPECIFIED|Clerk#000000206|0| furiously furious platelets. slyly final packa|
+2276|43|O|141159.63|1996-04-29|4-NOT SPECIFIED|Clerk#000000821|0|ecial requests. fox|
+2277|89|F|79270.23|1995-01-02|4-NOT SPECIFIED|Clerk#000000385|0|accounts cajole. even i|
+2278|142|O|101878.46|1998-04-25|3-MEDIUM|Clerk#000000186|0|r pinto beans integrate after the carefully even deposits. blit|
+2279|80|F|142322.33|1993-02-23|3-MEDIUM|Clerk#000000898|0|de of the quickly unusual instructio|
+2304|46|F|93769.28|1994-01-07|4-NOT SPECIFIED|Clerk#000000415|0|onic platelets. ironic packages haggle. packages nag doggedly according to|
+2305|43|F|122964.66|1993-01-26|2-HIGH|Clerk#000000440|0|ove the furiously even acco|
+2306|28|O|244704.23|1995-07-26|2-HIGH|Clerk#000000975|0| wake furiously requests. permanent requests affix. final packages caj|
+2307|106|F|59417.76|1993-06-29|5-LOW|Clerk#000000952|0|furiously even asymptotes? carefully regular accounts|
+2308|25|F|58546.02|1992-10-25|4-NOT SPECIFIED|Clerk#000000609|0|ts. slyly final depo|
+2309|100|O|146933.07|1995-09-04|5-LOW|Clerk#000000803|0|he carefully pending packages. fluffily stealthy foxes engage carefully|
+2310|31|O|82928.12|1996-09-20|5-LOW|Clerk#000000917|0|wake carefully. unusual instructions nag ironic, regular excuse|
+2311|73|P|153233.93|1995-05-02|2-HIGH|Clerk#000000761|0|ly pending asymptotes-- furiously bold excus|
+2336|142|O|22294.51|1996-01-07|4-NOT SPECIFIED|Clerk#000000902|0|c, final excuses sleep furiously among the even theodolites. f|
+2337|142|O|45704.96|1997-06-18|4-NOT SPECIFIED|Clerk#000000754|0| quickly. final accounts haggle. carefully final acco|
+2338|140|O|28155.92|1997-09-15|2-HIGH|Clerk#000000951|0|riously final dugouts. final, ironic packages wake express, ironic id|
+2339|109|F|63470.78|1993-12-15|5-LOW|Clerk#000000847|0| against the regular |
+2340|65|O|30778.78|1996-01-12|1-URGENT|Clerk#000000964|0|ter the deposits sleep according to the slyly regular packages. carefully |
+2341|82|F|55950.21|1993-05-30|5-LOW|Clerk#000000443|0|sts-- blithely bold dolphins through the deposits nag blithely carefully re|
+2342|37|O|104038.78|1996-06-09|1-URGENT|Clerk#000000615|0|oost carefully across the regular accounts. blithely final d|
+2343|73|O|85381.00|1995-08-21|3-MEDIUM|Clerk#000000170|0|fluffily over the slyly special deposits. quickl|
+2368|13|F|101240.96|1993-08-20|1-URGENT|Clerk#000000830|0|t the bold instructions. carefully unusual |
+2369|110|O|73517.91|1996-12-24|2-HIGH|Clerk#000000752|0|iously even requests are dogged, express |
+2370|142|F|73924.21|1994-01-17|1-URGENT|Clerk#000000231|0|lyly final packages. quickly final deposits haggl|
+2371|19|O|193857.67|1998-01-07|1-URGENT|Clerk#000000028|0|ckages haggle at th|
+2372|31|O|104927.66|1997-11-21|5-LOW|Clerk#000000342|0|s: deposits haggle along the final ideas. careful|
+2373|28|F|55211.04|1994-03-12|4-NOT SPECIFIED|Clerk#000000306|0| even, special courts grow quickly. pending,|
+2374|4|F|115219.88|1993-10-29|4-NOT SPECIFIED|Clerk#000000081|0| blithely regular packages. blithely unusua|
+2375|5|O|106612.48|1996-11-20|3-MEDIUM|Clerk#000000197|0|unusual, pending theodolites cajole carefully |
+2400|37|O|92798.66|1998-07-25|5-LOW|Clerk#000000782|0|nusual courts nag against the carefully unusual pinto b|
+2401|148|O|88448.24|1997-07-29|4-NOT SPECIFIED|Clerk#000000531|0|ully unusual instructions boost carefully silently regular requests. |
+2402|67|O|70403.62|1996-09-06|4-NOT SPECIFIED|Clerk#000000162|0|slyly final sheaves sleep slyly. q|
+2403|55|O|111020.79|1998-04-11|3-MEDIUM|Clerk#000000820|0|furiously regular deposits use. furiously unusual accounts wake along the |
+2404|77|O|109077.69|1997-03-13|4-NOT SPECIFIED|Clerk#000000409|0|deposits breach furiously. ironic foxes haggle carefully bold packag|
+2405|73|O|115929.14|1996-12-23|3-MEDIUM|Clerk#000000535|0|ular, regular asympto|
+2406|7|O|182516.77|1996-10-28|5-LOW|Clerk#000000561|0|blithely regular accounts u|
+2407|55|O|112843.52|1998-06-19|2-HIGH|Clerk#000000068|0|uests affix slyly among the slyly regular depos|
+2432|103|O|62661.93|1996-07-13|1-URGENT|Clerk#000000115|0|re. slyly even deposits wake bra|
+2433|31|F|147071.86|1994-08-22|4-NOT SPECIFIED|Clerk#000000324|0|ess patterns are slyly. packages haggle carefu|
+2434|25|O|123956.25|1997-04-27|3-MEDIUM|Clerk#000000190|0|s. quickly ironic dolphins impress final deposits. blithel|
+2435|73|F|122490.66|1993-02-21|5-LOW|Clerk#000000112|0|es are carefully along the carefully final instructions. pe|
+2436|125|O|73990.08|1995-09-11|4-NOT SPECIFIED|Clerk#000000549|0|arefully. blithely bold deposits affix special accounts. final foxes nag. spe|
+2437|85|F|143411.69|1993-04-21|4-NOT SPECIFIED|Clerk#000000578|0|. theodolites wake slyly-- ironic, pending platelets above the carefully exp|
+2438|13|F|214494.39|1993-07-15|2-HIGH|Clerk#000000744|0|the final, regular warhorses. regularly |
+2439|55|O|41811.12|1997-03-15|2-HIGH|Clerk#000000819|0|lithely after the car|
+2464|145|O|30495.65|1997-11-23|5-LOW|Clerk#000000633|0|le about the instructions. courts wake carefully even|
+2465|34|O|180737.75|1995-06-24|1-URGENT|Clerk#000000078|0|al pinto beans. final, bold packages wake quickly|
+2466|19|F|161625.50|1994-03-06|1-URGENT|Clerk#000000424|0|c pinto beans. express deposits wake quickly. even, final courts nag. package|
+2467|35|O|7231.91|1995-07-16|4-NOT SPECIFIED|Clerk#000000914|0|pades sleep furiously. sometimes regular packages again|
+2468|112|O|160627.01|1997-06-09|4-NOT SPECIFIED|Clerk#000000260|0|ickly regular packages. slyly ruthless requests snooze quickly blithe|
+2469|124|O|192074.23|1996-11-26|5-LOW|Clerk#000000730|0| sleep closely regular instructions. furiously ironic instructi|
+2470|58|O|104966.33|1997-04-19|3-MEDIUM|Clerk#000000452|0|to the furiously final packages? pa|
+2471|89|O|34936.31|1998-03-12|4-NOT SPECIFIED|Clerk#000000860|0|carefully blithely regular pac|
+2496|136|F|140390.60|1994-01-09|2-HIGH|Clerk#000000142|0|slyly. pending instructions sleep. quic|
+2497|47|F|171326.48|1992-08-27|1-URGENT|Clerk#000000977|0|ily ironic pinto beans. furiously final platelets alongside of t|
+2498|97|F|45514.27|1993-11-08|5-LOW|Clerk#000000373|0|g the slyly special pinto beans. |
+2499|121|O|147243.86|1995-09-24|1-URGENT|Clerk#000000277|0|r the quickly bold foxes. bold instructi|
+2500|133|F|131122.82|1992-08-15|2-HIGH|Clerk#000000447|0|integrate slyly pending deposits. furiously ironic accounts across the s|
+2501|67|O|79380.51|1997-05-25|5-LOW|Clerk#000000144|0|ickly special theodolite|
+2502|70|F|33470.40|1993-05-28|4-NOT SPECIFIED|Clerk#000000914|0|lyly: carefully pending ideas affix again|
+2503|7|F|183671.08|1993-06-20|3-MEDIUM|Clerk#000000294|0|ly even packages was. ironic, regular deposits unwind furiously across the p|
+2528|55|F|92069.62|1994-11-20|1-URGENT|Clerk#000000789|0|ular dependencies? regular frays kindle according to the blith|
+2529|136|O|4104.30|1996-08-20|2-HIGH|Clerk#000000511|0|posits across the silent instructions wake blithely across |
+2530|128|F|58853.11|1994-03-21|3-MEDIUM|Clerk#000000291|0|ular instructions about the quic|
+2531|44|O|143212.85|1996-05-06|4-NOT SPECIFIED|Clerk#000000095|0|even accounts. furiously ironic excuses sleep fluffily. carefully silen|
+2532|94|O|116093.49|1995-10-11|2-HIGH|Clerk#000000498|0|the blithely pending accounts. regular, regular excuses boost aro|
+2533|50|O|168495.03|1997-03-24|1-URGENT|Clerk#000000594|0|ecial instructions. spec|
+2534|76|O|202784.54|1996-07-17|3-MEDIUM|Clerk#000000332|0|packages cajole ironic requests. furiously regular|
+2535|121|F|67018.30|1993-05-25|5-LOW|Clerk#000000296|0|phins cajole beneath the fluffily express asymptotes. c|
+2560|131|F|153426.79|1992-09-05|1-URGENT|Clerk#000000538|0|atelets; quickly sly requests|
+2561|58|O|137473.58|1997-11-14|1-URGENT|Clerk#000000861|0|ual requests. unusual deposits cajole furiously pending, regular platelets. |
+2562|10|F|136360.37|1992-08-01|1-URGENT|Clerk#000000467|0|elets. pending dolphins promise slyly. bo|
+2563|62|F|168952.10|1993-11-19|4-NOT SPECIFIED|Clerk#000000150|0|sly even packages after the furio|
+2564|77|F|3967.47|1994-09-09|2-HIGH|Clerk#000000718|0|usly regular pinto beans. orbits wake carefully. slyly e|
+2565|56|O|204438.57|1998-02-28|3-MEDIUM|Clerk#000000032|0|x-ray blithely along|
+2566|86|F|89992.48|1992-10-10|3-MEDIUM|Clerk#000000414|0|ructions boost bold ideas. idly ironic accounts use according to th|
+2567|70|O|263411.29|1998-02-27|2-HIGH|Clerk#000000031|0|detect. furiously ironic requests|
+2592|101|F|8225.96|1993-03-05|4-NOT SPECIFIED|Clerk#000000524|0|ts nag fluffily. quickly stealthy theodolite|
+2593|92|F|134726.09|1993-09-04|2-HIGH|Clerk#000000468|0|r the carefully final|
+2594|79|F|94866.39|1992-12-17|1-URGENT|Clerk#000000550|0|ests. theodolites above the blithely even accounts detect furio|
+2595|74|O|173130.20|1995-12-14|4-NOT SPECIFIED|Clerk#000000222|0|arefully ironic requests nag carefully ideas. |
+2596|43|O|74940.13|1996-08-17|1-URGENT|Clerk#000000242|0|requests. ironic, bold theodolites wak|
+2597|104|F|21964.66|1993-02-04|2-HIGH|Clerk#000000757|0|iously ruthless exc|
+2598|112|O|84871.50|1996-03-05|3-MEDIUM|Clerk#000000391|0| ironic notornis according to the blithely final requests should |
+2599|149|O|62807.13|1996-11-07|2-HIGH|Clerk#000000722|0|ts. slyly regular theodolites wake sil|
+2624|52|O|27148.63|1996-11-28|5-LOW|Clerk#000000930|0|ic, regular packages|
+2625|40|F|39382.74|1992-10-14|4-NOT SPECIFIED|Clerk#000000386|0| final deposits. blithely ironic ideas |
+2626|139|O|84314.51|1995-09-08|4-NOT SPECIFIED|Clerk#000000289|0|gside of the carefully special packages are furiously after the slyly express |
+2627|149|F|26798.65|1992-03-24|3-MEDIUM|Clerk#000000181|0|s. silent, ruthless requests|
+2628|56|F|165655.99|1993-10-22|5-LOW|Clerk#000000836|0|ajole across the blithely careful accounts. blithely silent deposits sl|
+2629|139|O|96458.03|1998-04-06|5-LOW|Clerk#000000680|0|uches dazzle carefully even, express excuses. ac|
+2630|85|F|127132.51|1992-10-24|5-LOW|Clerk#000000712|0|inal theodolites. ironic instructions s|
+2631|37|F|63103.32|1993-09-24|5-LOW|Clerk#000000833|0| quickly unusual deposits doubt around |
+2656|77|F|105492.37|1993-05-04|1-URGENT|Clerk#000000307|0|elets. slyly final accou|
+2657|25|O|148176.06|1995-10-17|2-HIGH|Clerk#000000160|0| foxes-- slyly final dependencies around the slyly final theodo|
+2658|14|O|163834.46|1995-09-23|3-MEDIUM|Clerk#000000400|0|bout the slyly regular accounts. ironic, |
+2659|83|F|79785.52|1993-12-18|4-NOT SPECIFIED|Clerk#000000758|0|cross the pending requests maintain |
+2660|127|O|16922.51|1995-08-05|5-LOW|Clerk#000000480|0|ly finally regular deposits. ironic theodolites cajole|
+2661|74|O|106036.84|1997-01-04|3-MEDIUM|Clerk#000000217|0|al, regular pinto beans. silently final deposits should have t|
+2662|37|O|87689.88|1996-08-21|3-MEDIUM|Clerk#000000589|0|bold pinto beans above the slyly final accounts affix furiously deposits. pac|
+2663|95|O|35131.80|1995-09-06|1-URGENT|Clerk#000000950|0|ar requests. furiously final dolphins along the fluffily spe|
+2688|98|F|181077.36|1992-01-24|2-HIGH|Clerk#000000720|0|have to nag according to the pending theodolites. sly|
+2689|103|F|41552.78|1992-04-09|4-NOT SPECIFIED|Clerk#000000698|0|press pains wake. furiously express theodolites alongsid|
+2690|94|O|224674.27|1996-03-31|3-MEDIUM|Clerk#000000760|0|ravely even theodolites |
+2691|7|F|30137.17|1992-04-30|5-LOW|Clerk#000000439|0|es at the regular deposits sleep slyly by the fluffy requests. eve|
+2692|62|O|24265.24|1997-12-02|3-MEDIUM|Clerk#000000878|0|es. regular asymptotes cajole above t|
+2693|19|O|66158.13|1996-09-04|1-URGENT|Clerk#000000370|0|ndle never. blithely regular packages nag carefully enticing platelets. ca|
+2694|121|O|102807.59|1996-03-14|5-LOW|Clerk#000000722|0| requests. bold deposits above the theodol|
+2695|58|O|138584.20|1996-08-20|1-URGENT|Clerk#000000697|0|ven deposits around the quickly regular packa|
+2720|31|F|161307.05|1993-06-08|1-URGENT|Clerk#000000948|0|quickly. special asymptotes are fluffily ironi|
+2721|79|O|59180.25|1996-01-27|2-HIGH|Clerk#000000401|0| ideas eat even, unusual ideas. theodolites are carefully|
+2722|35|F|50328.84|1994-04-09|5-LOW|Clerk#000000638|0|rding to the carefully quick deposits. bli|
+2723|61|O|104759.25|1995-10-06|5-LOW|Clerk#000000836|0|nts must have to cajo|
+2724|137|F|116069.66|1994-09-14|2-HIGH|Clerk#000000217|0| sleep blithely. blithely idle |
+2725|89|F|75144.68|1994-05-21|4-NOT SPECIFIED|Clerk#000000835|0|ular deposits. spec|
+2726|7|F|47753.00|1992-11-27|5-LOW|Clerk#000000470|0| blithely even dinos sleep care|
+2727|74|O|3089.42|1998-04-19|4-NOT SPECIFIED|Clerk#000000879|0|sual theodolites cajole enticingly above the furiously fin|
+2752|59|F|187932.30|1993-11-19|2-HIGH|Clerk#000000648|0| carefully regular foxes are quickly quickl|
+2753|16|F|159720.39|1993-11-30|2-HIGH|Clerk#000000380|0|ending instructions. unusual deposits|
+2754|145|F|25985.52|1994-04-03|2-HIGH|Clerk#000000960|0|cies detect slyly. |
+2755|118|F|101202.18|1992-02-07|4-NOT SPECIFIED|Clerk#000000177|0|ously according to the sly foxes. blithely regular pinto bean|
+2756|118|F|142323.38|1994-04-18|1-URGENT|Clerk#000000537|0|arefully special warho|
+2757|76|O|89792.48|1995-07-20|2-HIGH|Clerk#000000216|0| regular requests subl|
+2758|43|O|36671.88|1998-07-12|5-LOW|Clerk#000000863|0|s cajole according to the carefully special |
+2759|116|F|89731.10|1993-11-25|4-NOT SPECIFIED|Clerk#000000071|0|ts. regular, pending pinto beans sleep ab|
+2784|95|O|106635.21|1998-01-07|1-URGENT|Clerk#000000540|0|g deposits alongside of the silent requests s|
+2785|148|O|132854.79|1995-07-21|2-HIGH|Clerk#000000098|0|iously pending packages sleep according to the blithely unusual foxe|
+2786|79|F|178254.66|1992-03-22|2-HIGH|Clerk#000000976|0|al platelets cajole blithely ironic requests. ironic re|
+2787|103|O|3726.14|1995-09-30|1-URGENT|Clerk#000000906|0|he ironic, regular |
+2788|124|F|17172.66|1994-09-22|1-URGENT|Clerk#000000641|0|nts wake across the fluffily bold accoun|
+2789|37|O|219123.27|1998-03-14|2-HIGH|Clerk#000000972|0|gular patterns boost. carefully even re|
+2790|25|F|177458.97|1994-08-19|2-HIGH|Clerk#000000679|0| the carefully express deposits sleep slyly |
+2791|121|F|156697.55|1994-10-10|2-HIGH|Clerk#000000662|0|as. slyly ironic accounts play furiously bl|
+2816|58|F|42225.53|1994-09-20|2-HIGH|Clerk#000000289|0|kages at the final deposits cajole furious foxes. quickly |
+2817|40|F|71453.85|1994-04-19|3-MEDIUM|Clerk#000000982|0|ic foxes haggle upon the daringly even pinto beans. slyly|
+2818|49|F|120086.84|1994-12-12|3-MEDIUM|Clerk#000000413|0|eep furiously special ideas. express |
+2819|103|F|66927.16|1994-05-05|1-URGENT|Clerk#000000769|0|ngside of the blithely ironic dolphins. furio|
+2820|19|F|143813.39|1994-05-20|3-MEDIUM|Clerk#000000807|0|equests are furiously. carefu|
+2821|118|F|36592.48|1993-08-09|3-MEDIUM|Clerk#000000323|0|ng requests. even instructions are quickly express, silent instructi|
+2822|79|F|40142.15|1993-07-26|2-HIGH|Clerk#000000510|0|furiously against the accounts. unusual accounts aft|
+2823|79|O|171894.45|1995-09-09|2-HIGH|Clerk#000000567|0|encies. carefully fluffy accounts m|
+2848|70|F|116258.53|1992-03-10|1-URGENT|Clerk#000000256|0|ly fluffy foxes sleep furiously across the slyly regu|
+2849|46|O|180054.29|1996-04-30|2-HIGH|Clerk#000000659|0|al packages are after the quickly bold requests. carefully special |
+2850|100|O|122969.79|1996-10-02|2-HIGH|Clerk#000000392|0|, regular deposits. furiously pending packages hinder carefully carefully u|
+2851|145|O|7859.36|1997-09-07|5-LOW|Clerk#000000566|0|Tiresias wake quickly quickly even|
+2852|91|F|99050.81|1993-01-16|1-URGENT|Clerk#000000740|0|ruthless deposits against the final instructions use quickly al|
+2853|94|F|103641.15|1994-05-05|2-HIGH|Clerk#000000878|0|the carefully even packages.|
+2854|139|F|153568.02|1994-06-27|1-URGENT|Clerk#000000010|0| furiously ironic tithes use furiously |
+2855|49|F|48419.58|1993-04-04|4-NOT SPECIFIED|Clerk#000000973|0| silent, regular packages sleep |
+2880|8|F|145761.99|1992-03-15|2-HIGH|Clerk#000000756|0|ves maintain doggedly spec|
+2881|100|F|45695.84|1992-05-10|5-LOW|Clerk#000000864|0|uriously. slyly express requests according to the silent dol|
+2882|121|O|172872.37|1995-08-22|2-HIGH|Clerk#000000891|0|pending deposits. carefully eve|
+2883|121|F|170360.27|1995-01-23|5-LOW|Clerk#000000180|0|uses. carefully ironic accounts lose fluffil|
+2884|92|O|71683.84|1997-10-12|3-MEDIUM|Clerk#000000780|0|efully express instructions sleep against|
+2885|7|F|146896.72|1992-09-19|4-NOT SPECIFIED|Clerk#000000280|0|ly sometimes special excuses. final requests are |
+2886|109|F|94527.23|1994-11-13|4-NOT SPECIFIED|Clerk#000000619|0|uctions. ironic packages sle|
+2887|109|O|28571.39|1997-05-26|5-LOW|Clerk#000000566|0|slyly even pinto beans. slyly bold epitaphs cajole blithely above t|
+2912|94|F|27727.52|1992-03-12|5-LOW|Clerk#000000186|0|jole blithely above the quickly regular packages. carefully regular pinto bean|
+2913|43|O|130702.19|1997-07-12|3-MEDIUM|Clerk#000000118|0|mptotes doubt furiously slyly regu|
+2914|109|F|60867.14|1993-03-03|3-MEDIUM|Clerk#000000543|0|he slyly regular theodolites are furiously sile|
+2915|94|F|96015.13|1994-03-31|5-LOW|Clerk#000000410|0|ld packages. bold deposits boost blithely. ironic, unusual theodoli|
+2916|8|O|20182.22|1995-12-27|2-HIGH|Clerk#000000681|0|ithely blithe deposits sleep beyond the|
+2917|91|O|100714.13|1997-12-09|4-NOT SPECIFIED|Clerk#000000061|0| special dugouts among the special deposi|
+2918|118|O|21760.09|1996-09-08|3-MEDIUM|Clerk#000000439|0|ular deposits across th|
+2919|53|F|137223.14|1993-12-10|2-HIGH|Clerk#000000209|0|es. pearls wake quietly slyly ironic instructions--|
+2944|14|O|146581.14|1997-09-24|4-NOT SPECIFIED|Clerk#000000740|0|deas. permanently special foxes haggle carefully ab|
+2945|29|O|223507.72|1996-01-03|2-HIGH|Clerk#000000499|0|ons are carefully toward the permanent, bold pinto beans. regu|
+2946|125|O|102226.59|1996-02-05|5-LOW|Clerk#000000329|0|g instructions about the regular accounts sleep carefully along the pen|
+2947|70|P|43360.95|1995-04-26|1-URGENT|Clerk#000000464|0|ronic accounts. accounts run furiously d|
+2948|44|F|100758.71|1994-08-23|5-LOW|Clerk#000000701|0| deposits according to the blithely pending |
+2949|137|F|94231.71|1994-04-12|2-HIGH|Clerk#000000184|0|y ironic accounts use. quickly blithe accou|
+2950|136|O|183620.33|1997-07-06|1-URGENT|Clerk#000000833|0| dolphins around the furiously |
+2951|74|O|125509.17|1996-02-06|2-HIGH|Clerk#000000680|0|gular deposits above the finally regular ideas integrate idly stealthil|
+2976|29|F|145768.47|1993-12-10|4-NOT SPECIFIED|Clerk#000000159|0|. furiously ironic asymptotes haggle ruthlessly silently regular r|
+2977|73|O|25170.88|1996-08-27|3-MEDIUM|Clerk#000000252|0|quickly special platelets are furio|
+2978|44|P|139542.14|1995-05-03|1-URGENT|Clerk#000000135|0|d. even platelets are. ironic dependencies cajole slow, e|
+2979|133|O|116789.98|1996-03-23|3-MEDIUM|Clerk#000000820|0|even, ironic foxes sleep along|
+2980|4|O|187514.11|1996-09-14|3-MEDIUM|Clerk#000000661|0|y quick pinto beans wake. slyly re|
+2981|49|O|37776.79|1998-07-29|5-LOW|Clerk#000000299|0|hely among the express foxes. blithely stealthy requests cajole boldly. regu|
+2982|85|F|55582.94|1995-03-19|2-HIGH|Clerk#000000402|0|lyly. express theodolites affix slyly after the slyly speci|
+2983|62|F|58168.07|1992-01-07|1-URGENT|Clerk#000000278|0|r the even requests. accounts maintain. regular accounts|
+3008|40|O|156018.74|1995-11-08|3-MEDIUM|Clerk#000000701|0|ze quickly. blithely regular packages above the slyly bold foxes shall|
+3009|55|O|108424.94|1997-02-28|1-URGENT|Clerk#000000205|0|r ideas. carefully pe|
+3010|8|O|141647.08|1996-01-26|2-HIGH|Clerk#000000931|0| blithely final requests. special deposits are slyl|
+3011|91|F|46418.85|1992-01-14|5-LOW|Clerk#000000515|0|onic deposits kindle slyly. dependencies around the quickly iro|
+3012|32|F|91678.66|1993-05-05|1-URGENT|Clerk#000000414|0|ts after the regular pinto beans impress blithely s|
+3013|143|O|156407.40|1997-02-05|5-LOW|Clerk#000000591|0|the furiously pendin|
+3014|29|F|194159.59|1992-10-30|4-NOT SPECIFIED|Clerk#000000476|0|ep blithely according to the blith|
+3015|103|F|110826.83|1992-09-27|5-LOW|Clerk#000000013|0|ously regular deposits affix carefully. furiousl|
+3040|112|F|119201.64|1993-04-12|3-MEDIUM|Clerk#000000544|0|carefully special packages. blithe|
+3041|113|O|23039.46|1997-06-03|5-LOW|Clerk#000000092|0|s. unusual, pending deposits use carefully. thinly final|
+3042|20|F|104523.03|1994-11-21|3-MEDIUM|Clerk#000000573|0| the slyly ironic depo|
+3043|44|F|78221.69|1992-04-25|5-LOW|Clerk#000000137|0|cajole blithely furiously fina|
+3044|53|O|52433.54|1996-04-03|2-HIGH|Clerk#000000008|0|cajole final courts. ironic deposits about the quickly final re|
+3045|50|O|85822.67|1995-09-27|1-URGENT|Clerk#000000405|0| express courts sleep quickly special asymptotes. |
+3046|32|O|117817.52|1995-11-30|2-HIGH|Clerk#000000522|0|r deposits. platelets use furi|
+3047|25|O|37881.31|1997-03-21|1-URGENT|Clerk#000000962|0|as. slyly express deposits are dogged pearls. silent ide|
+3072|23|F|87475.82|1994-01-30|4-NOT SPECIFIED|Clerk#000000370|0|ely final deposits cajole carefully. ironic, re|
+3073|136|F|151419.50|1994-01-08|3-MEDIUM|Clerk#000000404|0|kly slyly bold accounts. express courts near the regular ideas sleep bli|
+3074|67|F|85861.93|1992-11-01|5-LOW|Clerk#000000546|0|yly even asymptotes shall have to haggle fluffily. deposits are|
+3075|127|F|37696.70|1994-05-07|3-MEDIUM|Clerk#000000433|0|ackages: carefully unusual reques|
+3076|92|F|93828.15|1993-07-23|2-HIGH|Clerk#000000099|0|busy foxes. deposits affix quickly ironic, pending pint|
+3077|121|O|99290.01|1997-08-06|2-HIGH|Clerk#000000228|0|kly. fluffily ironic requests use qui|
+3078|49|F|46310.83|1993-02-12|2-HIGH|Clerk#000000110|0|ounts are alongside of the blith|
+3079|100|O|148299.05|1997-09-12|5-LOW|Clerk#000000505|0|lly ironic accounts|
+3104|70|F|102693.61|1993-09-16|3-MEDIUM|Clerk#000000871|0|ges boost-- regular accounts are furiousl|
+3105|137|O|125396.80|1996-11-13|4-NOT SPECIFIED|Clerk#000000772|0|s. blithely final ins|
+3106|145|O|132494.97|1997-01-12|3-MEDIUM|Clerk#000000729|0|its use slyly final theodolites; regular dolphins hang above t|
+3107|26|O|107406.26|1997-08-21|1-URGENT|Clerk#000000669|0|ously even deposits acr|
+3108|85|F|63278.00|1993-08-05|1-URGENT|Clerk#000000574|0|s packages haggle furiously am|
+3109|124|F|216104.85|1993-07-24|5-LOW|Clerk#000000936|0|bold requests sleep quickly according to the slyly final|
+3110|88|F|115161.29|1994-12-17|2-HIGH|Clerk#000000564|0|round the fluffy instructions. carefully silent packages cajol|
+3111|133|O|154383.37|1995-08-25|5-LOW|Clerk#000000922|0|slyly regular theodolites. furious deposits cajole deposits. ironic theodoli|
+3136|23|F|145426.11|1994-08-10|4-NOT SPECIFIED|Clerk#000000891|0|tructions sleep slyly. pending di|
+3137|136|O|8958.65|1995-07-26|3-MEDIUM|Clerk#000000063|0|ymptotes wake carefully above t|
+3138|139|F|139579.18|1994-02-09|4-NOT SPECIFIED|Clerk#000000650|0|e fluffily final theodolites. even dependencies wake along the quickly ir|
+3139|17|F|40975.96|1992-01-02|3-MEDIUM|Clerk#000000855|0|ounts against the ruthlessly unusual dolphins|
+3140|145|F|54356.10|1992-04-09|1-URGENT|Clerk#000000670|0|carefully ironic deposits use furiously. blith|
+3141|26|O|115959.96|1995-11-10|1-URGENT|Clerk#000000475|0|es. furiously bold instructions after the carefully final p|
+3142|8|F|16030.15|1992-06-28|3-MEDIUM|Clerk#000000043|0|usual accounts about the carefully special requests sleep slyly quickly regul|
+3143|107|F|135647.68|1993-02-17|1-URGENT|Clerk#000000519|0| are final, ironic accounts. ironic |
+3168|136|F|69412.71|1992-01-30|5-LOW|Clerk#000000352|0|s sleep slyly? ironic, furious instructions detect. quickly final i|
+3169|19|F|126804.90|1993-12-21|3-MEDIUM|Clerk#000000252|0| even pinto beans are blithely special, special multip|
+3170|5|O|190142.17|1997-11-09|1-URGENT|Clerk#000000288|0|requests. furiously bold|
+3171|47|F|84405.78|1993-04-06|5-LOW|Clerk#000000940|0|ar deposits. idly r|
+3172|89|F|121360.83|1992-06-03|4-NOT SPECIFIED|Clerk#000000771|0|es. slyly ironic packages x-ra|
+3173|148|O|64892.73|1996-08-10|5-LOW|Clerk#000000516|0|ial requests lose along t|
+3174|127|O|92856.91|1995-11-15|5-LOW|Clerk#000000663|0|rts. silent, regular pinto beans are blithely regular packages. furiousl|
+3175|44|F|205282.63|1994-07-15|5-LOW|Clerk#000000629|0| across the slyly even realms use carefully ironic deposits: sl|
+3200|13|O|131103.31|1996-02-07|1-URGENT|Clerk#000000020|0| regular dependencies impress evenly even excuses. blithely |
+3201|97|F|90755.31|1993-07-02|4-NOT SPECIFIED|Clerk#000000738|0|. busy, express instruction|
+3202|88|F|50601.01|1992-12-24|5-LOW|Clerk#000000067|0|fluffily express requests affix carefully around th|
+3203|127|O|49357.72|1997-11-05|2-HIGH|Clerk#000000493|0|e furiously silent warhorses. slyly silent deposits wake bli|
+3204|10|F|41573.42|1992-12-26|1-URGENT|Clerk#000000693|0|ess somas cajole slyly. pending accounts cajole|
+3205|148|F|153637.79|1992-04-11|5-LOW|Clerk#000000803|0|e furiously. quickly regular dinos about the final pinto be|
+3206|122|O|64344.86|1996-08-09|1-URGENT|Clerk#000000755|0|ntegrate furiously final, express |
+3207|22|O|133038.59|1998-02-16|1-URGENT|Clerk#000000695|0|uriously accounts. fluffily i|
+3232|82|F|55619.01|1992-10-09|1-URGENT|Clerk#000000314|0|yly final accounts. packages agains|
+3233|140|F|54121.92|1994-10-24|5-LOW|Clerk#000000470|0|ly ironic epitaphs use stealthy, express deposits. quickly regular instruct|
+3234|14|O|147343.68|1996-04-05|4-NOT SPECIFIED|Clerk#000000367|0|ents according to the dependencies will sleep after the blithely even p|
+3235|46|O|104695.09|1995-11-15|5-LOW|Clerk#000000349|0| quickly pinto beans. ironi|
+3236|142|O|39470.39|1996-11-06|4-NOT SPECIFIED|Clerk#000000553|0|ithely slyly pending req|
+3237|19|F|10508.12|1992-06-03|1-URGENT|Clerk#000000606|0|inal requests. slyly even foxes detect about the furiously exp|
+3238|61|F|41375.69|1993-02-21|5-LOW|Clerk#000000818|0|lly express deposits are. furiously unusual ideas wake carefully somas. instr|
+3239|35|O|156802.80|1998-01-12|4-NOT SPECIFIED|Clerk#000000619|0| cajole carefully along the furiously pending deposits. |
+3264|94|O|162634.53|1996-11-02|5-LOW|Clerk#000000244|0|carefully. express, bold|
+3265|53|F|43315.15|1992-06-27|1-URGENT|Clerk#000000265|0|re quickly quickly pe|
+3266|4|P|68309.28|1995-03-17|5-LOW|Clerk#000000545|0|refully ironic instructions. slyly final pi|
+3267|112|O|33998.90|1997-01-07|5-LOW|Clerk#000000484|0| the packages. regular decoys about the bold dependencies grow fi|
+3268|142|F|36024.96|1994-06-25|5-LOW|Clerk#000000746|0|y brave requests unwind furiously accordin|
+3269|17|O|218697.85|1996-03-01|3-MEDIUM|Clerk#000000378|0|ts. accounts wake carefully. carefully dogged accounts wake slyly slyly i|
+3270|38|O|166669.86|1997-05-28|1-URGENT|Clerk#000000375|0|uffily pending courts ca|
+3271|34|F|86534.05|1992-01-01|1-URGENT|Clerk#000000421|0|s. furiously regular requests|
+3296|148|F|187553.35|1994-10-19|3-MEDIUM|Clerk#000000991|0|as! carefully final requests wake. furiously even|
+3297|139|F|9679.45|1992-11-03|2-HIGH|Clerk#000000220|0| after the theodolites cajole carefully according to the finally|
+3298|116|O|62716.67|1996-04-17|5-LOW|Clerk#000000241|0|even accounts boost |
+3299|91|F|42867.92|1993-12-26|3-MEDIUM|Clerk#000000853|0|bold deposits. special instructions sleep care|
+3300|118|O|27049.22|1995-07-15|5-LOW|Clerk#000000198|0|ses. carefully unusual instructions must have to detect about the blithel|
+3301|133|F|48497.09|1994-09-04|4-NOT SPECIFIED|Clerk#000000325|0|ular gifts impress enticingly carefully express deposits; instructions boo|
+3302|34|O|38330.42|1995-11-14|2-HIGH|Clerk#000000367|0|eep blithely ironic requests. quickly even courts haggle slyly|
+3303|145|O|97758.28|1997-12-14|4-NOT SPECIFIED|Clerk#000000661|0|nto beans sleep furiously above the carefully ironic |
+3328|7|F|139580.85|1992-11-19|5-LOW|Clerk#000000384|0|ake among the express accounts? carefully ironic packages cajole never.|
+3329|4|O|46107.70|1995-07-03|2-HIGH|Clerk#000000236|0|old deposits. special accounts haggle furiousl|
+3330|7|F|43255.19|1994-12-19|1-URGENT|Clerk#000000124|0|kages use. carefully regular deposits cajole carefully about |
+3331|91|F|65189.17|1993-05-21|2-HIGH|Clerk#000000901|0|uffily carefully sly accounts. blithely unu|
+3332|143|F|73739.06|1994-11-05|1-URGENT|Clerk#000000840|0|ans detect carefully furiously final deposits: regular accoun|
+3333|92|F|197973.22|1992-09-16|4-NOT SPECIFIED|Clerk#000000157|0|ctions boost slyly quickly even accounts. deposits along|
+3334|76|O|28930.68|1996-02-18|5-LOW|Clerk#000000532|0|ounts maintain carefully. furiously close request|
+3335|49|O|112603.34|1995-10-15|3-MEDIUM|Clerk#000000694|0| deposits poach. ironic ideas about the carefully ironi|
+3360|103|O|168750.48|1998-01-23|5-LOW|Clerk#000000254|0| the deposits. fluffily bold requests cajole regula|
+3361|49|F|75026.51|1992-08-23|4-NOT SPECIFIED|Clerk#000000577|0|unts detect furiously instructions. slow deposi|
+3362|140|O|183176.60|1995-07-29|5-LOW|Clerk#000000011|0|the quickly pending deposits. silent, ev|
+3363|52|O|91017.61|1995-09-23|2-HIGH|Clerk#000000615|0|posits. ironic, final deposits are furiously slyly pending |
+3364|46|O|108412.57|1997-06-21|1-URGENT|Clerk#000000280|0|y even foxes? blithely stea|
+3365|82|F|174634.12|1994-11-09|2-HIGH|Clerk#000000126|0|he slyly regular foxes nag about the accounts. fluffily |
+3366|52|O|13603.08|1997-05-18|1-URGENT|Clerk#000000160|0| pinto beans upon the quickly expres|
+3367|73|F|101339.68|1992-12-31|4-NOT SPECIFIED|Clerk#000000029|0|efully blithely ironic pinto beans. carefully close |
+3392|74|O|96057.42|1995-10-28|1-URGENT|Clerk#000000325|0|es thrash blithely depths. bold multipliers wake f|
+3393|98|O|183104.71|1995-07-04|2-HIGH|Clerk#000000076|0|even requests. excuses are carefully deposits. fluf|
+3394|149|O|162165.94|1996-05-05|4-NOT SPECIFIED|Clerk#000000105|0| blithely among the attainments. carefully final accounts nag blit|
+3395|149|F|141486.77|1994-10-30|4-NOT SPECIFIED|Clerk#000000682|0|ideas haggle beside the ev|
+3396|149|F|196443.16|1994-05-21|3-MEDIUM|Clerk#000000868|0|uffily regular platelet|
+3397|130|F|80084.61|1994-06-23|3-MEDIUM|Clerk#000000048|0|yly. final deposits wake f|
+3398|67|O|1147.42|1996-09-23|1-URGENT|Clerk#000000818|0|uthless, special courts atop the unusual accounts grow fur|
+3399|122|P|56938.16|1995-02-28|4-NOT SPECIFIED|Clerk#000000575|0|the carefully sly accounts. regular, pending theodolites wa|
+3424|103|O|42410.57|1996-08-21|1-URGENT|Clerk#000000190|0|ven requests are quickly pending accounts. blithely furious requests |
+3425|115|O|157040.57|1996-03-31|4-NOT SPECIFIED|Clerk#000000188|0|ions. deposits nag blithely alongside of the carefully f|
+3426|53|O|91929.93|1996-10-16|3-MEDIUM|Clerk#000000283|0|alongside of the slyly|
+3427|4|O|133451.14|1997-05-29|4-NOT SPECIFIED|Clerk#000000404|0|y final pinto beans snooze fluffily bold asymptot|
+3428|10|O|88047.04|1996-04-07|5-LOW|Clerk#000000953|0|lar excuses. slyly pending ideas detect p|
+3429|146|O|141902.54|1997-01-06|4-NOT SPECIFIED|Clerk#000000737|0|l deposits cajole furiously enticing deposits. blithe packages haggle careful|
+3430|113|F|161066.22|1994-12-12|4-NOT SPECIFIED|Clerk#000000664|0| regular attainments are at the final foxes. final packages along the blithe|
+3431|47|F|45536.27|1993-08-22|1-URGENT|Clerk#000000439|0| sleep. slyly busy Tiresias a|
+3456|46|F|32796.35|1993-06-01|5-LOW|Clerk#000000924|0|es promise slyly. ironicall|
+3457|25|P|174223.20|1995-04-27|4-NOT SPECIFIED|Clerk#000000849|0|ely thin asymptotes. deposits kindle. pending|
+3458|95|F|153069.14|1994-12-22|2-HIGH|Clerk#000000392|0|rges snooze. slyly unusua|
+3459|119|F|127134.05|1994-07-28|4-NOT SPECIFIED|Clerk#000000777|0|n instructions? carefully regular excuses are blithely. silent, ironi|
+3460|82|O|245976.74|1995-10-03|2-HIGH|Clerk#000000078|0|ans integrate carefu|
+3461|100|F|190960.69|1993-01-31|1-URGENT|Clerk#000000504|0|al, bold deposits cajole fluffily fluffily final foxes. pending ideas beli|
+3462|133|O|63590.17|1997-05-17|3-MEDIUM|Clerk#000000657|0|uriously express asympto|
+3463|89|F|85255.56|1993-08-18|1-URGENT|Clerk#000000545|0|ding to the carefully ironic deposits|
+3488|148|F|92716.17|1995-01-08|3-MEDIUM|Clerk#000000694|0|cording to the carefully regular deposits. re|
+3489|109|F|62453.97|1993-07-29|3-MEDIUM|Clerk#000000307|0|s detect. carefully even platelets across the fur|
+3490|91|O|100106.96|1997-05-26|5-LOW|Clerk#000000703|0|gular ideas. furiously silent deposits across the unusual accounts boost i|
+3491|83|O|50287.06|1998-06-24|1-URGENT|Clerk#000000560|0|nic orbits believe carefully across the |
+3492|103|F|168721.45|1994-11-24|5-LOW|Clerk#000000066|0|packages along the regular foxes lose final dependencie|
+3493|82|F|41686.10|1993-08-24|2-HIGH|Clerk#000000887|0|lyly special accounts use blithely across the furiously sil|
+3494|49|F|136058.70|1993-04-04|5-LOW|Clerk#000000559|0|r instructions haggle. accounts cajole. carefully final requests at the |
+3495|31|O|58666.79|1996-02-26|2-HIGH|Clerk#000000441|0|nticing excuses are carefully|
+3520|125|O|151233.65|1997-08-04|1-URGENT|Clerk#000000023|0|hely. ideas nag; even, even fo|
+3521|7|F|142029.67|1992-10-26|5-LOW|Clerk#000000812|0|y even instructions cajole carefully above the bli|
+3522|26|F|151515.08|1994-09-26|5-LOW|Clerk#000000250|0|deposits-- slyly stealthy requests boost caref|
+3523|149|O|129657.08|1998-04-07|2-HIGH|Clerk#000000688|0|are on the carefully even depe|
+3524|94|F|22767.49|1992-05-03|2-HIGH|Clerk#000000607|0|efully unusual tithes among the foxes use blithely daringly bold deposits. re|
+3525|109|O|100749.60|1995-12-22|4-NOT SPECIFIED|Clerk#000000084|0|s nag among the blithely e|
+3526|56|F|53827.34|1995-03-16|5-LOW|Clerk#000000364|0|to the quickly special deposits print agai|
+3527|56|O|145232.09|1997-06-21|5-LOW|Clerk#000000874|0|regular ideas across the quickly bold theodo|
+3552|35|O|103656.44|1997-04-23|2-HIGH|Clerk#000000973|0| the ironic packages. furiously |
+3553|91|F|119838.14|1994-05-18|3-MEDIUM|Clerk#000000270|0|counts mold furiously. slyly i|
+3554|44|O|98335.61|1995-06-17|5-LOW|Clerk#000000931|0|hely ironic requests haggl|
+3555|46|O|134442.37|1996-07-07|5-LOW|Clerk#000000585|0|s nag carefully regular, even pinto be|
+3556|16|F|114681.55|1992-09-23|4-NOT SPECIFIED|Clerk#000000140|0|e. dependencies need to haggle alongs|
+3557|121|F|85477.89|1992-11-09|2-HIGH|Clerk#000000291|0|ithely courts. furi|
+3558|28|O|112912.00|1996-02-29|1-URGENT|Clerk#000000841|0|around the furiously even requests. quickl|
+3559|106|F|30722.49|1992-10-24|3-MEDIUM|Clerk#000000634|0|sly deposits. fluffily final ideas cajole careful|
+3584|13|O|80487.97|1997-08-11|1-URGENT|Clerk#000000760|0|fully bold packages. fluffily final braids haggle final, ironic dolphins. b|
+3585|139|F|159015.39|1994-11-23|2-HIGH|Clerk#000000988|0|regular asymptotes. bold pains above the carefully pending asymptot|
+3586|121|F|112845.04|1993-12-05|2-HIGH|Clerk#000000438|0|he quickly final courts. carefully regular requests nag unusua|
+3587|79|O|174798.97|1996-05-10|4-NOT SPECIFIED|Clerk#000000443|0|ular patterns detect |
+3588|119|F|207925.83|1995-03-19|4-NOT SPECIFIED|Clerk#000000316|0|ong the pains. evenly unusual |
+3589|31|F|39103.37|1994-05-26|2-HIGH|Clerk#000000023|0|ithe deposits nag furiously. furiously pending packages sleep f|
+3590|149|P|218482.70|1995-05-13|5-LOW|Clerk#000000986|0|lyly final deposits.|
+3591|136|F|98140.86|1993-12-08|3-MEDIUM|Clerk#000000144|0|ual foxes haggle! unusual request|
+3616|128|F|60933.29|1994-02-16|4-NOT SPECIFIED|Clerk#000000268|0|uickly about the quickly final requests. fluffily final packages wake evenly|
+3617|40|O|126205.42|1996-03-19|3-MEDIUM|Clerk#000000886|0|the carefully regular platelets ha|
+3618|10|O|136954.81|1997-12-13|3-MEDIUM|Clerk#000000894|0|. ideas run carefully. thin, pending |
+3619|149|O|222274.54|1996-11-20|2-HIGH|Clerk#000000211|0|uests mold after the blithely ironic excuses. slyly pending pa|
+3620|44|O|59291.75|1997-03-07|5-LOW|Clerk#000000124|0|le quickly against the epitaphs. requests sleep slyly according to the|
+3621|142|F|106150.05|1993-05-06|3-MEDIUM|Clerk#000000643|0|kly unusual deposits. qu|
+3622|91|O|109202.90|1995-11-27|5-LOW|Clerk#000000012|0|c deposits are fluffily about the blithely final theo|
+3623|4|O|175017.68|1996-12-26|1-URGENT|Clerk#000000184|0|- ironic excuses boost quickly in place |
+3648|125|F|180417.11|1993-06-17|5-LOW|Clerk#000000717|0|foxes. unusual deposits boost quickly. slyly regular asymptotes across t|
+3649|40|F|124470.32|1994-07-06|5-LOW|Clerk#000000349|0|taphs boost above the final p|
+3650|46|F|189547.57|1992-05-28|4-NOT SPECIFIED|Clerk#000000454|0|kages sleep fluffily slyly|
+3651|100|O|113191.45|1998-04-27|1-URGENT|Clerk#000000222|0|ly unusual deposits thrash quickly after the ideas.|
+3652|107|O|107732.23|1997-02-25|4-NOT SPECIFIED|Clerk#000000024|0|sly even requests after the |
+3653|40|F|142866.39|1994-03-27|1-URGENT|Clerk#000000402|0| pearls. bold accounts are along the ironic,|
+3654|7|F|222653.54|1992-06-03|5-LOW|Clerk#000000475|0|s cajole slyly carefully special theodolites. even deposits haggl|
+3655|49|F|74882.22|1992-10-06|1-URGENT|Clerk#000000815|0|er the carefully unusual deposits sleep quickly according to|
+3680|127|F|124402.59|1992-12-10|4-NOT SPECIFIED|Clerk#000000793|0|ular platelets. carefully regular packages cajole blithely al|
+3681|52|F|36889.65|1992-04-04|1-URGENT|Clerk#000000566|0|. ironic deposits against the ironic, regular frets use pending plat|
+3682|32|O|67525.43|1997-01-22|2-HIGH|Clerk#000000001|0|es haggle carefully. decoys nag |
+3683|88|F|99960.46|1993-03-04|2-HIGH|Clerk#000000248|0|ze across the express foxes. carefully special acco|
+3684|23|F|89509.91|1993-07-20|2-HIGH|Clerk#000000835|0|bold accounts affix along the carefully ironic requ|
+3685|16|F|154958.89|1992-01-17|3-MEDIUM|Clerk#000000954|0| sleep fluffily special ide|
+3686|40|O|82190.77|1998-07-07|2-HIGH|Clerk#000000175|0|s. furiously final pinto beans poach carefully among |
+3687|43|F|99851.38|1993-02-03|1-URGENT|Clerk#000000585|0|gular accounts. slyly regular instructions can are final ide|
+3712|64|F|127527.05|1992-01-02|2-HIGH|Clerk#000000032|0| promise according |
+3713|149|O|215342.63|1998-05-07|3-MEDIUM|Clerk#000000325|0|s haggle quickly. ironic, regular Tiresi|
+3714|40|O|84493.55|1998-05-01|3-MEDIUM|Clerk#000000595|0|nding accounts. ironic pinto beans wake slyly. furiously pendin|
+3715|65|O|64000.93|1996-03-18|1-URGENT|Clerk#000000463|0| always silent requests wake pinto beans. slyly pending foxes are aga|
+3716|43|O|146221.66|1997-08-19|4-NOT SPECIFIED|Clerk#000000748|0| pending ideas haggle. ironic,|
+3717|28|O|176525.53|1998-06-03|4-NOT SPECIFIED|Clerk#000000974|0|t the carefully even ideas use sp|
+3718|31|O|63195.54|1996-10-23|2-HIGH|Clerk#000000016|0|refully. furiously final packages use carefully slyly pending deposits! final,|
+3719|118|O|139902.71|1997-02-16|2-HIGH|Clerk#000000034|0|, enticing accounts are blithely among the daringly final asymptotes. furious|
+3744|65|F|33085.68|1992-01-10|3-MEDIUM|Clerk#000000765|0|osits sublate about the regular requests. fluffily unusual accou|
+3745|112|F|19405.73|1993-09-29|5-LOW|Clerk#000000181|0|ckages poach slyly against the foxes. slyly ironic instructi|
+3746|74|F|80018.54|1994-09-11|4-NOT SPECIFIED|Clerk#000000188|0|. express, special requests nag quic|
+3747|149|O|204355.65|1996-08-20|1-URGENT|Clerk#000000226|0|refully across the final theodolites. carefully bold accounts cajol|
+3748|53|O|83804.38|1998-02-28|1-URGENT|Clerk#000000156|0|slyly special packages|
+3749|38|P|87073.89|1995-02-24|3-MEDIUM|Clerk#000000639|0|y regular instructions haggle blithel|
+3750|97|P|177181.67|1995-04-30|3-MEDIUM|Clerk#000000885|0|y. express, even packages wake after the ide|
+3751|10|F|202917.72|1994-04-27|4-NOT SPECIFIED|Clerk#000000925|0|sheaves. express, unusual t|
+3776|85|F|150349.92|1992-11-20|2-HIGH|Clerk#000000698|0|efully even platelets slee|
+3777|28|F|82467.29|1994-04-08|3-MEDIUM|Clerk#000000941|0| regular, special dolphins cajole enticingly ca|
+3778|106|F|221036.31|1993-05-26|1-URGENT|Clerk#000000187|0| above the express requests. packages maintain fluffily according to|
+3779|74|O|31538.94|1997-01-05|4-NOT SPECIFIED|Clerk#000000670|0| against the deposits. quickly bold instructions x-ray. pending fox|
+3780|41|O|65385.42|1996-04-13|5-LOW|Clerk#000000967|0| around the brave, pendin|
+3781|139|O|133864.82|1996-06-20|1-URGENT|Clerk#000000394|0|yly after the ruthless packages. pinto beans use slyly: never ironic dependenc|
+3782|65|O|145096.17|1996-08-24|1-URGENT|Clerk#000000121|0|counts are. pending, regular asym|
+3783|44|F|155017.92|1993-12-06|4-NOT SPECIFIED|Clerk#000000614|0| along the pinto beans. special packages use. regular theo|
+3808|79|F|228054.01|1994-04-24|1-URGENT|Clerk#000000717|0|odolites. blithely ironic cour|
+3809|148|O|143070.70|1996-05-01|5-LOW|Clerk#000000646|0| regular excuses. even theodolites are fluffily according to t|
+3810|100|F|124675.27|1992-09-17|1-URGENT|Clerk#000000660|0|ters sleep across the carefully final |
+3811|80|O|154967.89|1998-04-16|3-MEDIUM|Clerk#000000290|0|sits wake slyly abo|
+3812|41|O|70502.52|1996-08-13|3-MEDIUM|Clerk#000000727|0|al, final requests cajole|
+3813|146|O|77247.05|1998-06-29|1-URGENT|Clerk#000000531|0|g the furiously regular instructions|
+3814|118|P|149451.88|1995-02-22|5-LOW|Clerk#000000669|0| the furiously pending theodo|
+3815|104|O|14275.01|1997-08-26|1-URGENT|Clerk#000000249|0|es snooze carefully stealth|
+3840|100|O|187156.38|1998-07-17|4-NOT SPECIFIED|Clerk#000000713|0|yly slow theodolites. enticingly |
+3841|58|F|129033.13|1994-10-05|4-NOT SPECIFIED|Clerk#000000018|0| bold requests sleep quickly ironic packages. sometimes regular deposits nag |
+3842|28|F|131447.03|1992-04-09|5-LOW|Clerk#000000418|0|silent ideas. final deposits use furiously. blithely express excuses cajole fu|
+3843|10|O|34035.17|1997-01-04|4-NOT SPECIFIED|Clerk#000000693|0|eodolites; slyly unusual accounts nag boldly |
+3844|79|F|6793.45|1994-12-29|1-URGENT|Clerk#000000686|0|r dolphins. slyly ironic theodolites ag|
+3845|89|F|134333.33|1992-04-26|1-URGENT|Clerk#000000404|0|es among the pending, regular accounts sleep blithely blithely even de|
+3846|49|O|123120.06|1998-02-05|2-HIGH|Clerk#000000877|0|y alongside of the slyl|
+3847|34|F|7014.31|1993-03-12|5-LOW|Clerk#000000338|0|uriously even deposits. furiously pe|
+3872|134|O|198538.68|1996-09-06|5-LOW|Clerk#000000943|0|counts boost slyly against the ironic platelets-- blithely p|
+3873|55|O|95291.79|1998-03-30|4-NOT SPECIFIED|Clerk#000000791|0|express deposits-- even ideas |
+3874|119|F|66455.34|1993-06-09|3-MEDIUM|Clerk#000000208|0|ular asymptotes sleep blithely ironic ideas. blithel|
+3875|118|O|74483.95|1997-09-10|1-URGENT|Clerk#000000587|0| solve among the fluffily even |
+3876|29|O|95126.32|1996-08-02|5-LOW|Clerk#000000708|0|into beans. blithely|
+3877|17|F|178492.01|1993-05-21|5-LOW|Clerk#000000652|0|foxes. thinly bold reques|
+3878|88|O|59989.66|1997-03-23|1-URGENT|Clerk#000000314|0|e carefully regular platelets. special, express dependencies slee|
+3879|142|O|80274.22|1995-11-23|1-URGENT|Clerk#000000231|0|sts along the quickly ironic sentiments cajole carefully according to t|
+3904|149|O|39338.44|1997-11-15|4-NOT SPECIFIED|Clerk#000000883|0|sits haggle furiously across the requests. theodolites ha|
+3905|22|F|56227.04|1993-12-21|4-NOT SPECIFIED|Clerk#000000573|0|usly even accounts lose quietly above the slyly express p|
+3906|46|F|145630.76|1992-05-28|3-MEDIUM|Clerk#000000867|0|ironic theodolites haggle blithely above the final re|
+3907|67|F|240457.56|1992-08-19|3-MEDIUM|Clerk#000000084|0|gular pinto beans sleep f|
+3908|43|F|57127.71|1993-03-09|3-MEDIUM|Clerk#000000490|0|ounts cajole. regularly|
+3909|22|O|82746.74|1998-07-27|1-URGENT|Clerk#000000980|0|nic, special theodolites sleep furiously! furiously |
+3910|64|O|47272.67|1996-08-26|3-MEDIUM|Clerk#000000270|0|ickly. furiously final packag|
+3911|10|P|35019.95|1995-03-17|4-NOT SPECIFIED|Clerk#000000818|0|he fluffily final forges haggle slyly according to the blithely|
+3936|32|O|168618.39|1996-11-07|2-HIGH|Clerk#000000200|0|iously express packages engage slyly fina|
+3937|94|O|187516.29|1997-11-30|4-NOT SPECIFIED|Clerk#000000189|0|ckages boost carefully blithely q|
+3938|31|F|46918.22|1993-03-03|1-URGENT|Clerk#000000199|0|. unusual, final foxes haggle|
+3939|70|O|8720.45|1996-01-11|5-LOW|Clerk#000000647|0|ly ruthlessly silent requests. blithely regular requests haggle blithely wh|
+3940|149|O|129012.84|1996-02-14|5-LOW|Clerk#000000363|0|e above the ideas. quickly even dependencies along the blithely ir|
+3941|136|O|95453.80|1996-08-29|2-HIGH|Clerk#000000503|0|gular theodolites integrate quickly |
+3942|76|F|38596.81|1993-06-28|4-NOT SPECIFIED|Clerk#000000608|0|eas cajole bold requests. idly silent instructions |
+3943|40|O|60314.97|1996-10-09|5-LOW|Clerk#000000482|0|se alongside of the final pinto beans. regular packages boost across the ca|
+3968|25|O|121704.45|1997-02-17|4-NOT SPECIFIED|Clerk#000000431|0| the slyly special accounts; |
+3969|52|O|169797.40|1997-05-14|2-HIGH|Clerk#000000731|0|uriously final dependencies slee|
+3970|76|F|163709.85|1992-03-27|3-MEDIUM|Clerk#000000190|0|luffily furiously regular deposits. blithely special requests cajole blithely|
+3971|104|O|47925.47|1996-06-28|5-LOW|Clerk#000000287|0|alongside of the instructions ought to are |
+3972|124|F|1861.19|1994-04-21|3-MEDIUM|Clerk#000000049|0|y regular requests haggle quickly. pending, express acco|
+3973|103|F|91541.48|1992-03-24|4-NOT SPECIFIED|Clerk#000000114|0|somas according to the quickly even instructions wake fu|
+3974|94|O|56779.06|1996-03-05|4-NOT SPECIFIED|Clerk#000000938|0|deposits are furiously beneath the bl|
+3975|118|O|37804.43|1995-04-11|3-MEDIUM|Clerk#000000016|0|ts. regular, regular Tiresias play furiously. ironi|
+4000|70|F|84053.93|1992-01-04|5-LOW|Clerk#000000339|0|le carefully closely even pinto beans. regular, ironic foxes against the|
+4001|115|O|95929.46|1997-05-15|3-MEDIUM|Clerk#000000878|0|detect. asymptotes sleep furio|
+4002|104|O|76518.11|1997-04-08|5-LOW|Clerk#000000097|0| regular braids are. furiously even patterns agains|
+4003|112|F|17603.01|1993-01-27|1-URGENT|Clerk#000000177|0| blithe theodolites are slyly. slyly silent accounts toward|
+4004|70|F|220715.14|1993-05-07|3-MEDIUM|Clerk#000000273|0|accounts among the blithely regular sentiments |
+4005|140|O|129062.13|1996-11-20|2-HIGH|Clerk#000000341|0|ily according to the slyly iron|
+4006|35|F|70557.05|1995-01-04|3-MEDIUM|Clerk#000000765|0|ly ironic packages integrate. regular requests alongside of |
+4007|8|F|116193.97|1993-06-18|2-HIGH|Clerk#000000623|0|ecial packages. slyly regular accounts integrate |
+4032|10|O|62497.51|1998-02-26|3-MEDIUM|Clerk#000000686|0|iresias sleep slyly regular ideas. quickly unusual|
+4033|83|F|57740.74|1993-06-02|5-LOW|Clerk#000000181|0|ously bold instructions haggle furiously above the fluf|
+4034|94|F|186912.51|1993-11-14|4-NOT SPECIFIED|Clerk#000000548|0|ts x-ray. express requests affix fluffily regular theodolites. pending, fina|
+4035|118|F|22840.21|1992-02-19|5-LOW|Clerk#000000097|0|he ironic deposits sleep blith|
+4036|47|O|82563.10|1997-04-26|3-MEDIUM|Clerk#000000398|0|ly express deposits nag slyly. ironic, final asymptotes boost bra|
+4037|121|F|36389.43|1993-03-24|2-HIGH|Clerk#000000384|0|t carefully above the unusual the|
+4038|94|O|155045.39|1996-01-06|1-URGENT|Clerk#000000272|0|re slyly. silent requests wake quickly. regular packages play quickly |
+4039|29|O|143753.01|1997-11-16|1-URGENT|Clerk#000000358|0|ly ironic deposits. ironic reques|
+4064|130|O|148500.71|1996-10-10|4-NOT SPECIFIED|Clerk#000000598|0|ccounts. furiously unusual theodolites wake carefully about|
+4065|80|F|156345.64|1994-06-09|1-URGENT|Clerk#000000131|0|even foxes! slyly final deposits agai|
+4066|32|O|176911.21|1997-01-27|4-NOT SPECIFIED|Clerk#000000286|0|yly ironic dinos. quickly regular accounts haggle. requests wa|
+4067|16|F|136517.34|1992-10-07|2-HIGH|Clerk#000000027|0|tes boost furiously quick asymptotes. final deposits of the dolphins solv|
+4068|125|O|71852.67|1996-09-18|3-MEDIUM|Clerk#000000203|0|lly even accounts wake furiously across the unusual platelets. unusu|
+4069|73|F|198816.13|1992-05-13|3-MEDIUM|Clerk#000000359|0|deposits: slyly bold ideas detect furiously. f|
+4070|29|O|98275.37|1995-06-12|2-HIGH|Clerk#000000713|0|xpress ideas poach ab|
+4071|148|O|67789.42|1996-09-15|4-NOT SPECIFIED|Clerk#000000486|0|nal deposits. pending deposits d|
+4096|139|F|81089.61|1992-07-03|4-NOT SPECIFIED|Clerk#000000706|0|sits. quickly thin deposits x-ray blith|
+4097|10|O|134308.04|1996-05-24|1-URGENT|Clerk#000000475|0|ickly under the even accounts. even packages after the furiously express|
+4098|23|O|48478.54|1996-11-05|4-NOT SPECIFIED|Clerk#000000491|0|otes. quickly final requests after the stealthily ironic pinto bean|
+4099|17|F|207364.80|1992-08-21|1-URGENT|Clerk#000000379|0|r platelets. slyly regular requests cajole carefully against the|
+4100|4|O|3892.77|1996-03-12|3-MEDIUM|Clerk#000000429|0|posits. carefully unusual packages use pending deposits. regular she|
+4101|142|F|21640.10|1993-11-22|4-NOT SPECIFIED|Clerk#000000704|0|y around the express, careful epitaphs. accounts use fluffily. quickly p|
+4102|22|O|128786.57|1996-03-17|1-URGENT|Clerk#000000675|0|nding dependencies was slyly about the bl|
+4103|106|F|38164.23|1992-07-03|5-LOW|Clerk#000000679|0|fully ironic dependencies.|
+4128|139|O|5472.17|1995-10-07|4-NOT SPECIFIED|Clerk#000000635|0|ctions. dependencies from the slyly regular accounts nag slyly fu|
+4129|32|F|67226.28|1993-06-26|3-MEDIUM|Clerk#000000541|0|nwind. quickly final theodolites use packages. accounts|
+4130|104|O|47823.04|1996-03-10|5-LOW|Clerk#000000609|0|omise alongside of the carefully final foxes. blithel|
+4131|44|O|145971.60|1998-01-30|1-URGENT|Clerk#000000612|0| above the foxes hang |
+4132|19|P|65601.08|1995-05-29|4-NOT SPECIFIED|Clerk#000000158|0|ld asymptotes solve alongside of the express, final packages. fluffily fi|
+4133|101|F|31693.88|1992-08-07|4-NOT SPECIFIED|Clerk#000000268|0|al, express foxes. quickly pending deposits might cajole alongsi|
+4134|97|F|125191.12|1995-01-12|1-URGENT|Clerk#000000171|0|fully even deposits. regular de|
+4135|37|O|99577.55|1997-03-10|3-MEDIUM|Clerk#000000627|0|ly quietly even ideas. deposits haggle blithely|
+4160|55|O|82493.07|1996-08-20|5-LOW|Clerk#000000283|0|the carefully special accounts. furiously regular dugouts alongs|
+4161|118|F|198995.21|1993-08-21|5-LOW|Clerk#000000047|0|nts. fluffily regular foxes above the quickly daring reques|
+4162|22|F|72359.55|1992-02-10|5-LOW|Clerk#000000179|0|r packages are slyly accounts. furiously special foxes detect carefully re|
+4163|64|F|11493.80|1992-12-21|2-HIGH|Clerk#000000268|0| integrate furiously slyly regular depende|
+4164|94|O|8709.16|1998-07-03|2-HIGH|Clerk#000000720|0| regularly busy theodolites boost furiously quickly bold packages. express, s|
+4165|4|O|11405.40|1997-07-25|3-MEDIUM|Clerk#000000621|0|special foxes affix never blithely ironic pinto beans; blithely |
+4166|43|F|100671.06|1993-02-28|5-LOW|Clerk#000000757|0|quickly sly forges impress. careful foxes across the blithely even a|
+4167|28|O|62108.45|1998-06-17|1-URGENT|Clerk#000000917|0|kly furiously even deposits. unu|
+4192|146|O|197192.95|1998-04-19|1-URGENT|Clerk#000000369|0|equests above the slyly regular pinto beans unwi|
+4193|4|F|143191.54|1994-01-09|2-HIGH|Clerk#000000201|0|ng accounts haggle quickly. packages use fluffily ironic excu|
+4194|106|F|62972.29|1994-10-16|3-MEDIUM|Clerk#000000385|0| instructions are quickly even pinto beans. courts boost furiously regular, ev|
+4195|104|F|54478.95|1993-05-29|4-NOT SPECIFIED|Clerk#000000777|0| pinto beans cajole furiously theodolites-- slyly regular deposits doub|
+4196|106|O|201455.98|1998-05-15|3-MEDIUM|Clerk#000000532|0|affix carefully. quickly final requests |
+4197|92|O|217709.03|1996-08-13|4-NOT SPECIFIED|Clerk#000000264|0| pinto beans according|
+4198|143|O|105789.01|1997-06-16|3-MEDIUM|Clerk#000000583|0|g the special packages haggle pen|
+4199|5|F|30494.62|1992-02-13|1-URGENT|Clerk#000000309|0|e blithely. special deposits haggle slyly final foxes. carefully even|
+4224|70|O|150655.44|1997-07-14|1-URGENT|Clerk#000000034|0|jole quickly final dolphins. slyly pending foxes wake furiously bold pl|
+4225|128|O|72533.07|1997-06-03|3-MEDIUM|Clerk#000000992|0|r the platelets nag among the special deposits. ironic, ironic re|
+4226|92|F|29827.44|1993-03-09|5-LOW|Clerk#000000203|0|phins wake slyly regular packages. deposits haggle slowl|
+4227|133|F|92261.08|1995-02-24|1-URGENT|Clerk#000000063|0|ng the requests; ideas haggle fluffily. slyly unusual ideas c|
+4228|110|O|22072.16|1997-03-28|5-LOW|Clerk#000000309|0|pecial requests aft|
+4229|14|O|75145.87|1998-03-03|1-URGENT|Clerk#000000301|0|p furiously: final excuses hagg|
+4230|140|F|219709.60|1992-03-04|1-URGENT|Clerk#000000364|0|lly ironic deposits integrate carefully about the fu|
+4231|86|O|111403.66|1997-11-20|4-NOT SPECIFIED|Clerk#000000630|0|ly final accounts cajole furiously accounts. bravely ironic platelets am|
+4256|118|F|23067.48|1992-04-05|4-NOT SPECIFIED|Clerk#000000043|0|y alongside of the fluffily iro|
+4257|17|P|41723.86|1995-03-25|3-MEDIUM|Clerk#000000682|0|r ideas cajole along the blithely regular gifts.|
+4258|92|O|133829.35|1996-10-27|4-NOT SPECIFIED|Clerk#000000364|0|efully final platelets around the blit|
+4259|104|O|12918.70|1997-10-09|5-LOW|Clerk#000000781|0|es snooze slyly against the furiously unusual ideas. furious|
+4260|142|F|18566.14|1992-05-16|4-NOT SPECIFIED|Clerk#000000919|0|e among the fluffily bold accounts.|
+4261|118|F|83665.20|1992-10-03|1-URGENT|Clerk#000000662|0| about the even, pending packages. slyly bold deposits boost|
+4262|88|O|176278.57|1996-08-04|3-MEDIUM|Clerk#000000239|0| of the furious accounts. furiously regular accounts w|
+4263|4|O|158885.83|1998-03-16|1-URGENT|Clerk#000000265|0|sly ruthless deposits. final packages are instructions. fu|
+4288|34|F|75030.81|1992-12-04|4-NOT SPECIFIED|Clerk#000000823|0|usly carefully even theodolites: slyly express pac|
+4289|125|F|20752.62|1993-10-07|3-MEDIUM|Clerk#000000912|0|e carefully close instructions. slyly special reques|
+4290|41|F|26128.99|1995-01-15|3-MEDIUM|Clerk#000000688|0| slyly quickly bold requests. final deposits haggle pending ideas! som|
+4291|89|F|71822.86|1993-11-29|3-MEDIUM|Clerk#000000655|0| sleep fluffily between the bold packages. bold|
+4292|25|F|145906.24|1992-01-09|3-MEDIUM|Clerk#000000794|0| ruthlessly. slyly bo|
+4293|103|O|198322.91|1996-08-20|2-HIGH|Clerk#000000750|0|ly packages. regular packages nag according to t|
+4294|49|F|232194.74|1992-08-15|3-MEDIUM|Clerk#000000407|0|ng pinto beans breach. slyly express requests bo|
+4295|5|O|77754.62|1996-02-10|3-MEDIUM|Clerk#000000023|0|e boldly bold dependencies|
+4320|115|O|67049.37|1996-12-08|4-NOT SPECIFIED|Clerk#000000223|0|ages haggle after the slowly bold se|
+4321|16|F|118896.95|1994-07-18|3-MEDIUM|Clerk#000000041|0|ending deposits are carefully carefully regular packa|
+4322|142|O|149671.92|1998-03-13|3-MEDIUM|Clerk#000000433|0|totes nag across the fluffily special instructions. quickly silent hockey |
+4323|104|F|27598.17|1994-01-23|2-HIGH|Clerk#000000282|0|lve after the slyly regular multipliers. even, regular excus|
+4324|73|O|178249.05|1995-07-17|1-URGENT|Clerk#000000800|0|ccounts. slyly stealthy requests shall have t|
+4325|130|O|20214.49|1996-07-18|2-HIGH|Clerk#000000591|0|y around the always ev|
+4326|29|O|39048.94|1996-10-27|4-NOT SPECIFIED|Clerk#000000869|0|packages. carefully express deposit|
+4327|146|P|126235.35|1995-03-16|2-HIGH|Clerk#000000571|0|yly pending braids. final requests abo|
+4352|14|O|18653.09|1997-11-26|2-HIGH|Clerk#000000620|0|ly final platelets integrate carefully even requ|
+4353|73|O|21815.30|1997-12-12|2-HIGH|Clerk#000000790|0|uickly even ideas cajole|
+4354|145|F|179827.12|1994-09-30|4-NOT SPECIFIED|Clerk#000000046|0|pending notornis. requests serve |
+4355|4|O|186370.23|1996-11-16|1-URGENT|Clerk#000000362|0|ndencies use furiously across the regular |
+4356|97|F|39828.51|1994-04-11|5-LOW|Clerk#000000956|0| asymptotes sleep blithely. asymptotes sleep. blithely regul|
+4357|47|O|67045.94|1997-10-23|4-NOT SPECIFIED|Clerk#000000031|0|ages nag between the|
+4358|25|O|46298.53|1997-08-12|1-URGENT|Clerk#000000692|0|according to the fluffily special asymptotes |
+4359|16|F|107824.40|1993-03-03|1-URGENT|Clerk#000000393|0|sts. special, unusual deposits across the ironic theodo|
+4384|25|F|52562.16|1992-07-13|1-URGENT|Clerk#000000192|0|onic platelets. furiously regular asymptotes according to the special pac|
+4385|122|O|39190.62|1996-08-06|2-HIGH|Clerk#000000597|0|ully final requests. ironic, even dolphins above the regular |
+4386|61|O|134413.58|1998-02-06|5-LOW|Clerk#000000070|0| dolphins. silent, idle pinto beans |
+4387|110|O|116740.67|1995-10-23|1-URGENT|Clerk#000000025|0|ter the regular pinto beans. special, final gifts above the requests wi|
+4388|10|O|69668.22|1996-03-28|2-HIGH|Clerk#000000715|0|ts wake against the carefully final accounts. sly|
+4389|55|F|120324.82|1994-05-05|3-MEDIUM|Clerk#000000403|0|wly express excuses after the permanently even instructions are|
+4390|7|P|140608.69|1995-05-23|1-URGENT|Clerk#000000691|0|inal pinto beans. exp|
+4391|38|F|48284.06|1992-02-18|2-HIGH|Clerk#000000880|0|regular accounts. even depo|
+4416|149|F|76067.10|1992-06-30|5-LOW|Clerk#000000391|0| deposits. ideas cajole express theodolites: |
+4417|67|O|60868.39|1998-07-09|1-URGENT|Clerk#000000365|0|ideas are alongside of the blithely final reque|
+4418|61|F|47099.71|1993-03-25|3-MEDIUM|Clerk#000000731|0|pecial pinto beans. close foxes affix iron|
+4419|104|O|94030.43|1996-06-12|4-NOT SPECIFIED|Clerk#000000410|0|ages wake furiously slyly thin theodolit|
+4420|109|F|6088.41|1994-06-18|1-URGENT|Clerk#000000706|0|lly bold deposits along the bold, pending foxes detect blithely after the acco|
+4421|10|O|258779.02|1997-04-04|3-MEDIUM|Clerk#000000246|0|t the pending warhorses. express waters a|
+4422|70|P|107140.22|1995-05-22|3-MEDIUM|Clerk#000000938|0|ly bold accounts sleep special, regular foxes. doggedly regular in|
+4423|64|F|4913.06|1995-02-17|5-LOW|Clerk#000000888|0|excuses are ruthless|
+4448|70|O|127191.47|1998-05-21|2-HIGH|Clerk#000000428|0|. deposits haggle around the silent packages; slyly unusual packages|
+4449|10|O|48206.14|1998-02-08|5-LOW|Clerk#000000035|0|ourts are carefully even deposits. pending |
+4450|106|O|110194.31|1997-07-15|1-URGENT|Clerk#000000867|0|quests boost. furiously even realms are blithely bold requests. bl|
+4451|4|F|92851.80|1994-10-01|1-URGENT|Clerk#000000181|0|. carefully final foxes along the quickly express T|
+4452|13|F|64838.66|1994-06-21|5-LOW|Clerk#000000985|0|oxes are slyly. express, ironic pinto beans wake after the quickly pending re|
+4453|65|O|137030.40|1997-04-01|3-MEDIUM|Clerk#000000603|0|ages could have to nag slyly furiously even asymptotes! slowly regular |
+4454|142|F|159578.94|1994-02-02|5-LOW|Clerk#000000411|0|uriously regular pint|
+4455|19|F|102534.63|1993-10-11|3-MEDIUM|Clerk#000000924|0|even requests. bravely regular foxes according to the carefully unusual |
+4480|85|F|28658.26|1994-03-31|4-NOT SPECIFIED|Clerk#000000534|0|press, bold deposits boost blit|
+4481|148|O|77705.40|1996-03-30|5-LOW|Clerk#000000443|0|press sheaves cajole furio|
+4482|82|P|63535.56|1995-05-15|4-NOT SPECIFIED|Clerk#000000534|0|ravely bold accounts. furiously ironic instructions affix quickly. pend|
+4483|52|F|126597.21|1992-03-07|3-MEDIUM|Clerk#000000615|0|its. blithely idle accounts run; theodolites wake carefully around the fi|
+4484|131|O|237947.61|1996-12-24|1-URGENT|Clerk#000000392|0|ct across the pinto beans. quickly pending excuses engage furiously.|
+4485|53|F|182432.17|1994-11-13|3-MEDIUM|Clerk#000000038|0|es wake slyly even packages. blithely brave requests nag above the regul|
+4486|37|O|135613.18|1998-03-03|2-HIGH|Clerk#000000656|0|ffily according to the carefully pending acc|
+4487|46|F|109469.90|1993-02-23|3-MEDIUM|Clerk#000000017|0|s up the never pending excuses wake furiously special pinto beans. furiously i|
+4512|70|O|148682.82|1995-10-25|5-LOW|Clerk#000000393|0|ending instructions maintain fu|
+4513|85|O|119820.38|1996-03-15|5-LOW|Clerk#000000154|0|ests. final, final ideas|
+4514|97|F|143899.85|1994-04-30|3-MEDIUM|Clerk#000000074|0|deposits according to the carefull|
+4515|140|F|161745.44|1992-03-17|1-URGENT|Clerk#000000191|0|quests among the accounts sleep boldly about the regular f|
+4516|130|F|35949.14|1994-03-29|3-MEDIUM|Clerk#000000739|0|ing packages sleep slyly regular attainments|
+4517|113|O|47614.08|1998-03-07|4-NOT SPECIFIED|Clerk#000000231|0|uriously final deposits doze furiously furiously reg|
+4518|125|O|25861.74|1997-05-01|3-MEDIUM|Clerk#000000187|0|luffily against the spec|
+4519|136|F|68885.66|1993-03-30|4-NOT SPECIFIED|Clerk#000000938|0|ccording to the final |
+4544|112|O|151148.81|1997-08-07|3-MEDIUM|Clerk#000000435|0|g dependencies dazzle slyly ironic somas. carefu|
+4545|59|F|143276.28|1993-01-17|4-NOT SPECIFIED|Clerk#000000303|0|ep. requests use sly|
+4546|43|O|39906.87|1995-07-29|5-LOW|Clerk#000000373|0|ns sleep. regular, regular instructions maintai|
+4547|109|F|52114.01|1993-08-23|3-MEDIUM|Clerk#000000519|0|uctions thrash platelets. slyly final foxes wake slyly against th|
+4548|127|O|139915.23|1996-06-28|5-LOW|Clerk#000000798|0| in place of the blithely express sentiments haggle slyly r|
+4549|64|O|43889.17|1998-03-05|4-NOT SPECIFIED|Clerk#000000965|0|ully even deposits dazzle. fluffily pending ideas against the requests|
+4550|118|F|27461.48|1994-12-29|2-HIGH|Clerk#000000748|0|s haggle carefully acco|
+4551|109|O|82824.14|1996-02-09|2-HIGH|Clerk#000000462|0|ts. slyly quick theodolite|
+4576|139|O|56936.10|1996-08-14|5-LOW|Clerk#000000798|0|e pending deposits. |
+4577|79|O|104259.88|1998-05-02|5-LOW|Clerk#000000409|0|ly. unusual platelets are alw|
+4578|91|F|95761.93|1992-09-13|5-LOW|Clerk#000000121|0| to the furiously ironic instructions? furiou|
+4579|106|O|85927.85|1995-12-01|2-HIGH|Clerk#000000951|0|its wake quickly blithely specia|
+4580|82|F|118464.65|1993-11-15|4-NOT SPECIFIED|Clerk#000000086|0|rs wake blithely regular requests. fluffily ev|
+4581|79|F|89592.11|1992-09-04|4-NOT SPECIFIED|Clerk#000000687|0|ges. carefully pending accounts use furiously abo|
+4582|19|O|18247.86|1996-07-04|1-URGENT|Clerk#000000638|0|g the furiously regular pac|
+4583|22|F|206495.43|1994-09-25|3-MEDIUM|Clerk#000000240|0|equests. slyly even platelets was qui|
+4608|80|F|157767.86|1994-06-17|1-URGENT|Clerk#000000259|0|y even instructions detect slyly asymptotes. blithely final packa|
+4609|133|O|70462.84|1996-12-05|3-MEDIUM|Clerk#000000239|0|hang slyly slyly expre|
+4610|26|F|135934.60|1993-06-18|5-LOW|Clerk#000000616|0|e carefully express pinto|
+4611|29|F|166506.22|1993-01-10|2-HIGH|Clerk#000000152|0|. furiously regular instructions haggle dolphins. even instructions det|
+4612|61|F|82598.87|1993-09-20|3-MEDIUM|Clerk#000000397|0|bove the deposits. even deposits dazzle. slyly express packages haggle sl|
+4613|133|O|212339.55|1998-03-05|3-MEDIUM|Clerk#000000541|0|furiously blithely pending dependen|
+4614|61|O|151801.06|1996-04-22|1-URGENT|Clerk#000000974|0| sauternes wake thinly special accounts. fur|
+4615|29|F|10500.27|1993-08-27|3-MEDIUM|Clerk#000000982|0|jole after the fluffily pending foxes. packages affix carefully acco|
+4640|97|O|81138.17|1996-01-01|5-LOW|Clerk#000000902|0|requests. deposits do detect above the blithely iron|
+4641|134|F|98485.21|1993-01-20|4-NOT SPECIFIED|Clerk#000000755|0|ronic, final requests integrate slyly: specia|
+4642|148|F|117537.87|1995-02-27|1-URGENT|Clerk#000000295|0|cial requests wake carefully around the regular, unusual ideas. furi|
+4643|67|O|52414.19|1995-06-30|2-HIGH|Clerk#000000292|0|ously regular packages. unusual, special platel|
+4644|94|O|85901.70|1998-01-17|5-LOW|Clerk#000000961|0|requests. fluffily even ideas bo|
+4645|44|F|231012.22|1994-09-20|1-URGENT|Clerk#000000764|0|fully even instructions. final gifts sublate quickly final requests. bl|
+4646|83|O|124637.19|1996-06-18|1-URGENT|Clerk#000000036|0|n place of the blithely qu|
+4647|28|F|110958.36|1994-05-14|3-MEDIUM|Clerk#000000626|0|out the deposits. slyly final pinto beans haggle idly. slyly s|
+4672|79|O|199593.71|1995-11-07|1-URGENT|Clerk#000000475|0|lyly final dependencies caj|
+4673|82|O|58094.75|1996-08-13|4-NOT SPECIFIED|Clerk#000000914|0|c deposits are slyly. bravely ironic deposits cajole carefully after the |
+4674|37|F|115411.37|1994-04-19|1-URGENT|Clerk#000000122|0|careful hockey players. carefully pending deposits caj|
+4675|86|F|68817.08|1993-11-25|4-NOT SPECIFIED|Clerk#000000741|0|al deposits haggle slyly final|
+4676|14|O|182025.95|1995-09-01|2-HIGH|Clerk#000000407|0|s. slyly bold accounts sleep furiously special|
+4677|40|O|25661.87|1998-02-21|3-MEDIUM|Clerk#000000245|0|ly pending deposits after the carefully regular foxes sleep blithely after t|
+4678|88|O|131752.07|1998-08-02|4-NOT SPECIFIED|Clerk#000000175|0|side of the bold platelets detect slyly blithely ironic e|
+4679|88|F|7211.59|1993-01-20|2-HIGH|Clerk#000000905|0|ely regular accounts affix slyly. final dolphins are. furiously final de|
+4704|2|O|63873.14|1996-08-16|4-NOT SPECIFIED|Clerk#000000256|0|lithely final requests about the fluffily regular |
+4705|98|F|173340.09|1992-03-22|4-NOT SPECIFIED|Clerk#000000522|0| special instructions poa|
+4706|25|F|101709.52|1992-12-29|4-NOT SPECIFIED|Clerk#000000722|0| packages above the never regular packages nag packages. deposits c|
+4707|91|F|61052.10|1995-02-27|2-HIGH|Clerk#000000943|0|ully enticing accounts behind the regular|
+4708|85|F|56998.36|1994-10-01|1-URGENT|Clerk#000000383|0|ly thinly even accounts. unusu|
+4709|26|O|49903.57|1996-01-08|3-MEDIUM|Clerk#000000785|0|he furiously even deposits! ironic theodolites haggle blithely. r|
+4710|100|F|88966.68|1994-12-08|4-NOT SPECIFIED|Clerk#000000734|0|the final, regular foxes. carefully ironic pattern|
+4711|142|O|129546.56|1998-05-06|1-URGENT|Clerk#000000818|0|mptotes. unusual packages wake furiously qui|
+4736|139|O|67572.73|1995-11-20|2-HIGH|Clerk#000000563|0|blithely regular courts affix into the carefully ironic deposits. slyly exp|
+4737|79|F|62014.51|1993-03-11|4-NOT SPECIFIED|Clerk#000000275|0|ents use slyly among the unusual, ironic pearls. furiously pending |
+4738|5|F|149466.62|1992-04-08|2-HIGH|Clerk#000000150|0|deposits. thin acco|
+4739|148|F|68255.82|1993-02-21|5-LOW|Clerk#000000872|0|ing to the pending attainments: pending, express account|
+4740|68|O|42579.40|1996-07-05|2-HIGH|Clerk#000000420|0| dependencies haggle about the|
+4741|127|F|180692.90|1992-07-07|4-NOT SPECIFIED|Clerk#000000983|0|ly bold deposits are slyly about the r|
+4742|64|P|155356.80|1995-03-23|3-MEDIUM|Clerk#000000058|0|n packages. quickly regular ideas cajole blithely|
+4743|97|F|65702.39|1993-03-31|5-LOW|Clerk#000000048|0|pinto beans above the bold, even idea|
+4768|136|F|4820.55|1993-11-22|2-HIGH|Clerk#000000875|0|ctions snooze idly beneath the quick waters. fluffily u|
+4769|121|P|136765.03|1995-04-14|4-NOT SPECIFIED|Clerk#000000116|0|pon the asymptotes. idle, final account|
+4770|59|O|72150.68|1995-06-20|2-HIGH|Clerk#000000461|0|cial instructions believe carefully. |
+4771|95|F|49625.21|1992-12-14|1-URGENT|Clerk#000000571|0|lly express deposits serve furiously along the f|
+4772|28|F|64102.93|1994-09-14|1-URGENT|Clerk#000000708|0|es sleep. regular requests haggle furiously slyly |
+4773|122|O|196080.26|1995-12-23|1-URGENT|Clerk#000000327|0|ptotes was slyly along the|
+4774|52|F|124380.73|1993-04-20|3-MEDIUM|Clerk#000000299|0|eposits use blithely bold deposits. carefully regular gifts about the fin|
+4775|128|O|112444.42|1995-08-13|4-NOT SPECIFIED|Clerk#000000609|0|s integrate slyly slyly final instructions. carefully bold pack|
+4800|37|F|91795.13|1992-01-06|5-LOW|Clerk#000000625|0|ggle furiously along the pending pinto beans. deposits use: final foxe|
+4801|88|O|108353.08|1996-01-25|1-URGENT|Clerk#000000553|0|r the final sentiments. pending theodolites sleep doggedly across t|
+4802|130|O|5978.65|1997-01-23|3-MEDIUM|Clerk#000000400|0| ironic, thin packages wake furiously ironic, ironic deposits. the|
+4803|124|O|158776.68|1996-02-08|5-LOW|Clerk#000000892|0|lly unusual courts are ironic|
+4804|37|F|111547.31|1992-01-28|2-HIGH|Clerk#000000614|0|ly final accounts. blithely unusual theodolite|
+4805|16|F|172102.96|1992-04-25|4-NOT SPECIFIED|Clerk#000000514|0|even accounts wake furiously slyly final accounts; blithel|
+4806|7|F|35390.15|1993-04-21|5-LOW|Clerk#000000625|0|ave accounts. furiously pending wa|
+4807|53|O|138902.23|1997-01-09|3-MEDIUM|Clerk#000000310|0|kly. slyly special accounts|
+4832|34|O|84954.79|1997-12-04|3-MEDIUM|Clerk#000000548|0|final accounts sleep among the blithe|
+4833|133|O|84800.44|1996-05-12|3-MEDIUM|Clerk#000000256|0|r deposits against the slyly final excuses slee|
+4834|19|O|124539.00|1996-09-12|2-HIGH|Clerk#000000284|0|lar accounts. furiously ironic accounts haggle slyly |
+4835|146|F|70857.51|1994-10-25|1-URGENT|Clerk#000000250|0|s integrate furiously blithely expr|
+4836|65|O|78711.40|1996-12-18|1-URGENT|Clerk#000000691|0|c packages cajole carefully through the accounts. careful|
+4837|130|O|68519.84|1998-04-24|4-NOT SPECIFIED|Clerk#000000517|0|n accounts are regular, bold accounts. even instructions use request|
+4838|44|F|61811.33|1992-08-02|1-URGENT|Clerk#000000569|0|ffily bold sentiments. carefully close dolphins cajole across the |
+4839|25|F|71241.63|1994-05-10|1-URGENT|Clerk#000000925|0| even somas. slyly express ideas lose carefully. blithely unusu|
+4864|88|F|149614.34|1992-11-11|5-LOW|Clerk#000000423|0|ests nag within the quickly ironic asymptotes. ironic|
+4865|85|O|162113.46|1997-06-07|3-MEDIUM|Clerk#000000418|0|sits boost stealthily above the bl|
+4866|53|O|25767.07|1997-08-07|2-HIGH|Clerk#000000663|0|kages. unusual packages nag fluffily. qui|
+4867|10|F|9741.03|1992-05-21|1-URGENT|Clerk#000000891|0|ss the slyly regular dependencies. fluffily regular deposits within the car|
+4868|76|O|159005.35|1997-03-02|5-LOW|Clerk#000000729|0|regular asymptotes. regular packages sublate carefully al|
+4869|58|F|175422.13|1994-09-26|5-LOW|Clerk#000000802|0|boost! ironic packages un|
+4870|103|F|94534.07|1994-08-06|3-MEDIUM|Clerk#000000911|0|nto beans about the blithely regular d|
+4871|46|O|129636.99|1995-06-12|1-URGENT|Clerk#000000531|0|ven, special instructions across t|
+4896|85|F|93206.35|1992-08-22|1-URGENT|Clerk#000000622|0|sly pending deposits. final accounts boost above the sly, even|
+4897|80|F|115688.85|1992-09-17|5-LOW|Clerk#000000184|0|s. bold pinto beans sleep. evenly final accounts daz|
+4898|14|F|40572.64|1994-07-11|4-NOT SPECIFIED|Clerk#000000841|0|final patterns. special theodolites haggle ruthlessly at the blithely spec|
+4899|61|F|12291.83|1993-10-18|4-NOT SPECIFIED|Clerk#000000348|0| instructions. furiously even packages are furiously speci|
+4900|137|F|221320.76|1992-06-30|4-NOT SPECIFIED|Clerk#000000878|0|sleep quickly unusual |
+4901|79|O|146298.28|1997-12-31|4-NOT SPECIFIED|Clerk#000000980|0|inal dependencies cajole furiously. carefully express accounts na|
+4902|139|O|26011.20|1998-07-04|3-MEDIUM|Clerk#000000874|0| the slyly express dolphins. |
+4903|92|F|34363.63|1992-03-22|4-NOT SPECIFIED|Clerk#000000907|0|yly. multipliers within the fo|
+4928|4|F|59931.42|1993-10-04|4-NOT SPECIFIED|Clerk#000000952|0|slyly brave instructions after the ironic excuses haggle ruthlessly about|
+4929|149|O|135187.33|1996-02-29|3-MEDIUM|Clerk#000000109|0|uests. furiously special ideas poach. pending |
+4930|149|F|176867.34|1994-05-06|5-LOW|Clerk#000000593|0| haggle slyly quietly final theodolites. packages are furious|
+4931|50|F|115759.13|1994-11-17|1-URGENT|Clerk#000000356|0|leep. slyly express dolphins nag slyly. furiously regular s|
+4932|122|F|42927.07|1993-08-10|1-URGENT|Clerk#000000830|0|onic foxes. enticingly reg|
+4933|94|O|42945.82|1995-07-14|3-MEDIUM|Clerk#000000848|0|y special sauternes integr|
+4934|40|O|180478.16|1997-02-17|1-URGENT|Clerk#000000372|0|nes cajole; carefully special accounts haggle. special pinto beans nag |
+4935|40|F|162088.30|1993-05-25|4-NOT SPECIFIED|Clerk#000000601|0|c foxes. fluffily pendin|
+4960|124|F|153259.41|1995-02-26|5-LOW|Clerk#000000229|0|uriously even excuses. fluffily regular instructions along the furiously ironi|
+4961|58|O|89224.24|1998-04-06|3-MEDIUM|Clerk#000000731|0| braids. furiously even theodolites |
+4962|104|F|44781.32|1993-07-28|3-MEDIUM|Clerk#000000008|0| breach never ironic |
+4963|34|O|54175.35|1996-11-07|3-MEDIUM|Clerk#000000754|0|ully unusual epitaphs nod s|
+4964|101|O|204163.10|1997-07-28|4-NOT SPECIFIED|Clerk#000000144|0|ithely final theodolites. blithely regu|
+4965|52|F|110626.82|1993-10-21|5-LOW|Clerk#000000638|0|dependencies poach packages. sometim|
+4966|70|O|59186.02|1996-09-07|2-HIGH|Clerk#000000243|0|accounts. blithely ironic courts wake boldly furiously express |
+4967|98|O|103814.27|1997-02-17|3-MEDIUM|Clerk#000000397|0|e theodolites; furiously b|
+4992|62|F|203904.80|1992-05-10|1-URGENT|Clerk#000000166|0|telets nag carefully am|
+4993|13|F|145730.19|1994-08-04|4-NOT SPECIFIED|Clerk#000000258|0|ing instructions nag furiously. un|
+4994|43|O|216071.76|1996-06-29|4-NOT SPECIFIED|Clerk#000000868|0|oxes wake above the asymptotes. bold requests sleep br|
+4995|40|O|189651.76|1996-01-06|4-NOT SPECIFIED|Clerk#000000748|0|s. even deposits boost along the express, even theodolites. stealthily ir|
+4996|133|F|100750.67|1992-09-14|3-MEDIUM|Clerk#000000433|0|foxes. carefully special packages haggle quickly fluffi|
+4997|47|O|122611.05|1998-03-18|5-LOW|Clerk#000000040|0|egrate final pinto beans. fluffily special notornis use blith|
+4998|32|F|129096.80|1992-01-11|4-NOT SPECIFIED|Clerk#000000054|0|alongside of the quickly final requests hang always|
+4999|85|F|98643.17|1993-06-26|2-HIGH|Clerk#000000504|0| dolphins cajole blithely above the sly |
+5024|124|O|116127.69|1996-10-25|3-MEDIUM|Clerk#000000659|0|r foxes. regular excuses are about the quickly regular theodolites. regular, |
+5025|121|O|20099.43|1997-02-03|5-LOW|Clerk#000000805|0|ackages are slyly about the quickly |
+5026|28|O|13197.78|1997-09-06|1-URGENT|Clerk#000000955|0|y final requests us|
+5027|148|O|181346.56|1997-08-30|2-HIGH|Clerk#000000751|0|e-- final, pending requests along t|
+5028|13|F|30755.69|1992-04-17|2-HIGH|Clerk#000000180|0|ickly blithely express deposits. b|
+5029|11|F|19811.69|1992-11-14|3-MEDIUM|Clerk#000000469|0|. regular accounts haggle slyly. regul|
+5030|106|O|71781.23|1998-05-25|4-NOT SPECIFIED|Clerk#000000564|0| wake slyly furiously thin requests. ironic pinto beans ha|
+5031|139|F|91438.59|1994-12-02|3-MEDIUM|Clerk#000000788|0|lar instructions haggle blithely pending foxes? sometimes final excuses h|
+5056|52|O|62258.18|1997-02-15|5-LOW|Clerk#000000828|0|lithely above the express ideas. blithely final deposits are fluffily spec|
+5057|64|O|76164.41|1997-08-03|1-URGENT|Clerk#000000955|0|r ironic requests of the carefully ironic dependencies wake slyly a|
+5058|119|O|17031.01|1998-03-23|1-URGENT|Clerk#000000367|0| the pending packages wake after the quickly speci|
+5059|43|F|67173.82|1993-11-10|2-HIGH|Clerk#000000058|0|latelets. final, regular accounts cajole furiously ironic pinto beans? do|
+5060|112|F|65218.47|1992-07-07|4-NOT SPECIFIED|Clerk#000000333|0|e according to the excuses. express theodo|
+5061|101|F|52190.52|1993-08-14|1-URGENT|Clerk#000000009|0|e packages use fluffily according to the carefully ironic deposits. bol|
+5062|61|F|109247.00|1992-10-08|3-MEDIUM|Clerk#000000012|0|ithely. blithely bold theodolites affix. blithely final deposits haggle ac|
+5063|23|O|98753.57|1997-05-17|2-HIGH|Clerk#000000745|0|lyly after the pending foxes. express theodolites breach across t|
+5088|130|F|101616.44|1993-01-06|5-LOW|Clerk#000000930|0|ole slyly since the quickly ironic br|
+5089|130|F|109246.54|1992-07-29|1-URGENT|Clerk#000000677|0|cial platelets. quiet, final ideas cajole carefully. unusu|
+5090|89|O|132838.49|1997-03-09|1-URGENT|Clerk#000000953|0|ress accounts affix silently carefully quick accounts. carefully f|
+5091|148|O|47852.06|1998-05-21|3-MEDIUM|Clerk#000000311|0|egular decoys mold carefully fluffily unus|
+5092|22|O|195834.96|1995-10-30|5-LOW|Clerk#000000194|0|are blithely along the pin|
+5093|79|F|190693.92|1993-09-03|3-MEDIUM|Clerk#000000802|0|ully ironic theodolites sleep above the furiously ruthless instructions. bli|
+5094|106|F|74892.08|1993-03-29|4-NOT SPECIFIED|Clerk#000000406|0|uickly pending deposits haggle quickly ide|
+5095|97|F|184583.99|1992-04-22|2-HIGH|Clerk#000000964|0|accounts are carefully! slyly even packages wake slyly a|
+5120|16|O|28007.73|1996-06-05|1-URGENT|Clerk#000000332|0|against the slyly express requests. furiousl|
+5121|133|F|150334.57|1992-05-11|4-NOT SPECIFIED|Clerk#000000736|0|gular requests. furiously final pearls against the permanent, thin courts s|
+5122|70|O|79863.84|1996-02-10|5-LOW|Clerk#000000780|0|blithely. slyly ironic deposits nag. excuses s|
+5123|10|O|11850.45|1998-02-10|1-URGENT|Clerk#000000776|0|ic requests. furiously ironic packages grow above the express, ironic inst|
+5124|25|O|159170.80|1997-04-04|4-NOT SPECIFIED|Clerk#000000749|0|kly even courts. bold packages solve. |
+5125|28|O|38065.28|1998-02-07|5-LOW|Clerk#000000834|0|ructions. dolphins wake slowly carefully unusual |
+5126|112|F|92123.32|1992-10-12|4-NOT SPECIFIED|Clerk#000000270|0|s. unusual deposits |
+5127|73|O|48024.99|1997-01-15|5-LOW|Clerk#000000829|0|fully express pinto beans. slyly final accounts along the ironic dugouts use s|
+5152|44|O|60568.34|1997-01-04|3-MEDIUM|Clerk#000000963|0| for the blithely reg|
+5153|113|O|193832.28|1995-08-26|1-URGENT|Clerk#000000954|0| the furiously ironic foxes. express packages shall cajole carefully across|
+5154|8|O|28070.86|1997-04-13|3-MEDIUM|Clerk#000000316|0|inal requests. slyly regular deposits nag. even deposits haggle agains|
+5155|77|F|70183.29|1994-06-12|2-HIGH|Clerk#000000108|0|y pending deposits are ag|
+5156|125|O|59439.44|1996-11-04|5-LOW|Clerk#000000117|0|ngside of the multipliers solve slyly requests. regu|
+5157|142|O|167056.34|1997-07-06|4-NOT SPECIFIED|Clerk#000000689|0|closely above the unusual deposits. furiously|
+5158|76|O|240284.95|1997-01-21|1-URGENT|Clerk#000000541|0| regular foxes. even foxes wake blithely |
+5159|106|O|147543.26|1996-09-25|1-URGENT|Clerk#000000303|0|tegrate slyly around the slyly sly sauternes. final pa|
+5184|85|O|209155.48|1998-07-20|5-LOW|Clerk#000000250|0|nding accounts detect final, even|
+5185|148|O|206179.68|1997-07-25|3-MEDIUM|Clerk#000000195|0| regular ideas about the even ex|
+5186|52|O|208892.63|1996-08-03|1-URGENT|Clerk#000000332|0|pecial platelets. slyly final ac|
+5187|55|O|46380.69|1997-07-16|3-MEDIUM|Clerk#000000682|0|ckly according to t|
+5188|140|P|66268.86|1995-03-02|4-NOT SPECIFIED|Clerk#000000029|0|counts. finally ironic requests ab|
+5189|71|F|184172.31|1993-11-26|5-LOW|Clerk#000000940|0|e after the pending accounts. asymptotes boost. re|
+5190|58|F|89684.31|1992-04-26|5-LOW|Clerk#000000888|0|equests. slyly unusual|
+5191|77|F|119910.04|1994-12-11|4-NOT SPECIFIED|Clerk#000000318|0|ing, regular deposits alongside of the deposits boost fluffily quickly ev|
+5216|59|O|16763.95|1997-08-14|3-MEDIUM|Clerk#000000418|0|des boost across the platelets. slyly busy theodolit|
+5217|35|O|135745.58|1995-10-13|2-HIGH|Clerk#000000873|0|ons might wake quickly according to th|
+5218|82|F|73882.37|1992-07-30|4-NOT SPECIFIED|Clerk#000000683|0|y ruthless packages according to the bold, ironic package|
+5219|88|O|21267.72|1997-02-27|1-URGENT|Clerk#000000510|0|aggle always. foxes above the ironic deposits |
+5220|10|F|24844.39|1992-07-30|2-HIGH|Clerk#000000051|0| final packages. ideas detect slyly around|
+5221|13|O|71968.10|1995-06-09|4-NOT SPECIFIED|Clerk#000000324|0|lar accounts above the sl|
+5222|80|F|1051.15|1994-05-27|4-NOT SPECIFIED|Clerk#000000613|0|along the bold ideas. furiously final foxes snoo|
+5223|149|F|105561.21|1994-06-30|1-URGENT|Clerk#000000745|0|e. theodolites serve blithely unusual, final foxes. carefully pending packag|
+5248|70|P|86958.28|1995-04-15|2-HIGH|Clerk#000000737|0|theodolites cajole according to the silent packages. quickly ironic packages a|
+5249|103|F|123586.03|1994-09-06|3-MEDIUM|Clerk#000000019|0|refully bold accounts |
+5250|97|O|29673.73|1995-07-16|2-HIGH|Clerk#000000307|0|. carefully final instructions sleep among the finally regular dependen|
+5251|34|O|34004.48|1995-04-12|3-MEDIUM|Clerk#000000687|0| ironic dugouts detect. reque|
+5252|91|O|173145.37|1996-02-17|1-URGENT|Clerk#000000724|0| ironic accounts among the silent asym|
+5253|148|P|108361.46|1995-04-11|2-HIGH|Clerk#000000275|0|egular requests! blithely regular deposits alongside of t|
+5254|112|F|196989.09|1992-07-26|4-NOT SPECIFIED|Clerk#000000527|0|he express, even ideas cajole blithely special requests|
+5255|64|O|75074.07|1996-07-12|5-LOW|Clerk#000000591|0|ly slow forges. express foxes haggle. regular, even asymp|
+5280|34|O|68052.70|1997-12-03|3-MEDIUM|Clerk#000000604|0|riously ironic instructions. ironic ideas according to the accounts boost fur|
+5281|124|O|179418.31|1995-11-02|2-HIGH|Clerk#000000158|0|ackages haggle slyly a|
+5282|50|O|94446.69|1998-01-30|1-URGENT|Clerk#000000030|0|rding to the unusual, bold accounts. regular instructions|
+5283|131|F|18594.66|1994-06-04|3-MEDIUM|Clerk#000000579|0|ests. even, final ideas alongside of t|
+5284|61|O|40548.99|1995-07-09|4-NOT SPECIFIED|Clerk#000000155|0| careful dependencies use sly|
+5285|70|F|99377.51|1994-01-18|2-HIGH|Clerk#000000976|0|p across the furiously ironic deposits.|
+5286|116|O|79646.89|1997-09-26|5-LOW|Clerk#000000606|0|structions are furiously quickly ironic asymptotes. quickly iro|
+5287|25|F|30045.95|1993-12-22|5-LOW|Clerk#000000406|0|regular packages. bold instructions sleep always. carefully final p|
+5312|65|F|66697.95|1995-02-24|2-HIGH|Clerk#000000690|0|ter the even, bold foxe|
+5313|13|O|159870.44|1997-06-17|4-NOT SPECIFIED|Clerk#000000896|0|le. final courts haggle furiously according to the |
+5314|34|O|26999.83|1995-06-02|2-HIGH|Clerk#000000617|0|ions across the quickly special d|
+5315|139|F|55554.97|1992-10-29|4-NOT SPECIFIED|Clerk#000000035|0| furiously. quickly unusual packages use. sly|
+5316|100|F|62316.61|1994-01-31|1-URGENT|Clerk#000000734|0| requests haggle across the regular, pending deposits. furiously regular requ|
+5317|37|F|228002.51|1994-09-09|5-LOW|Clerk#000000687|0|jole quickly at the slyly pend|
+5318|59|F|106935.19|1993-04-04|2-HIGH|Clerk#000000663|0|efully regular dolphins. even ideas nag fluffily furiously even packa|
+5319|98|O|68619.29|1996-01-21|1-URGENT|Clerk#000000237|0|lent requests. quickly pe|
+5344|109|O|88216.32|1998-06-21|3-MEDIUM|Clerk#000000569|0|s. ironic excuses cajole across the|
+5345|31|O|111924.56|1997-08-24|1-URGENT|Clerk#000000057|0|r the slyly silent packages. pending, even pinto b|
+5346|37|F|149536.20|1993-12-26|2-HIGH|Clerk#000000220|0|gly close packages against the even, regular escapades boost evenly accordi|
+5347|49|F|173024.71|1995-02-22|3-MEDIUM|Clerk#000000180|0|onic, regular deposits. packag|
+5348|53|O|119164.96|1997-11-08|5-LOW|Clerk#000000497|0|totes. accounts after the furiously|
+5349|67|O|38038.84|1996-09-01|1-URGENT|Clerk#000000960|0|le along the carefully bold dolphins. carefully special packa|
+5350|76|F|113417.03|1993-10-10|5-LOW|Clerk#000000604|0|ccounts after the carefully pending requests believe |
+5351|122|O|76799.25|1998-05-11|1-URGENT|Clerk#000000443|0|to beans sleep furiously after the carefully even|
+5376|149|F|98422.83|1994-07-04|5-LOW|Clerk#000000392|0|. quickly ironic deposits integrate along|
+5377|64|O|117728.37|1997-04-24|2-HIGH|Clerk#000000917|0|ons nag blithely furiously regula|
+5378|43|F|101899.93|1992-10-25|1-URGENT|Clerk#000000520|0|n ideas. regular accounts haggle. ironic ideas use along the bold ideas. blith|
+5379|89|O|47010.15|1995-08-08|2-HIGH|Clerk#000000503|0|he unusual accounts. carefully special instructi|
+5380|148|O|123014.83|1997-10-12|1-URGENT|Clerk#000000481|0|le slyly about the slyly final dolphins. fu|
+5381|32|F|223995.46|1993-01-29|5-LOW|Clerk#000000531|0|arefully bold packages are slyly furiously ironic foxes. fluffil|
+5382|35|F|138423.03|1992-01-13|5-LOW|Clerk#000000809|0|lent deposits are according to the reg|
+5383|31|O|11474.95|1995-05-26|5-LOW|Clerk#000000409|0|ly bold requests hang furiously furiously unusual accounts. evenly unusu|
+5408|23|F|123477.05|1992-07-21|5-LOW|Clerk#000000735|0|egular requests according to the|
+5409|13|F|145040.38|1992-01-09|5-LOW|Clerk#000000171|0|eans. regular accounts are regul|
+5410|22|O|139104.17|1998-07-28|4-NOT SPECIFIED|Clerk#000000117|0|final deposits: pending excuses boost. ironic theodolites cajole furi|
+5411|61|O|62541.27|1997-05-16|3-MEDIUM|Clerk#000000800|0|equests cajole slyly furious|
+5412|142|O|109979.71|1998-01-20|2-HIGH|Clerk#000000151|0|ets boost furiously regular accounts. regular foxes above th|
+5413|94|O|224382.57|1997-10-17|1-URGENT|Clerk#000000066|0|e even excuses. always final depen|
+5414|100|F|167017.39|1993-03-25|4-NOT SPECIFIED|Clerk#000000242|0|lent dependencies? carefully express requests sleep furiously ac|
+5415|23|F|176864.83|1992-08-05|3-MEDIUM|Clerk#000000998|0|ly even ideas nag blithely above the final instructions|
+5440|130|O|3223.17|1997-01-12|1-URGENT|Clerk#000000154|0|posits boost regularly ironic packages. regular, ironic deposits wak|
+5441|41|F|131891.05|1994-07-21|4-NOT SPECIFIED|Clerk#000000257|0|after the furiously ironic |
+5442|43|O|139332.94|1998-01-13|4-NOT SPECIFIED|Clerk#000000954|0|ully. quickly express accounts against the|
+5443|131|O|124950.79|1996-10-10|4-NOT SPECIFIED|Clerk#000000492|0|al foxes could detect. blithely stealthy asymptotes kind|
+5444|130|P|172908.01|1995-03-18|1-URGENT|Clerk#000000677|0| asymptotes. asymptotes cajole quickly quickly bo|
+5445|115|F|114990.63|1993-07-26|5-LOW|Clerk#000000623|0|s. even, special requests cajole furiously even, |
+5446|7|F|29920.80|1994-06-21|5-LOW|Clerk#000000304|0| furiously final pac|
+5447|13|O|29029.84|1996-03-16|3-MEDIUM|Clerk#000000597|0|uternes around the furiously bold accounts wake after |
+5472|70|F|221636.83|1993-04-11|5-LOW|Clerk#000000552|0|counts. deposits about the slyly dogged pinto beans cajole slyly|
+5473|65|F|63041.33|1992-03-25|4-NOT SPECIFIED|Clerk#000000306|0|te the quickly stealthy ideas. even, regular deposits above|
+5474|55|F|131079.52|1992-06-01|4-NOT SPECIFIED|Clerk#000000487|0|gle blithely enticing ideas. final, exp|
+5475|139|O|10645.48|1996-07-07|5-LOW|Clerk#000000856|0|es shall boost slyly. furiously even deposits lose. instruc|
+5476|91|O|26906.38|1997-11-06|1-URGENT|Clerk#000000189|0|furiously final ideas. furiously bold dependencies sleep care|
+5477|107|O|130125.64|1997-12-30|5-LOW|Clerk#000000689|0|ckages. ironic deposits caj|
+5478|116|O|97502.23|1996-05-17|1-URGENT|Clerk#000000272|0|ckages. quickly pending deposits thrash furiously: bl|
+5479|70|F|70553.45|1993-12-22|3-MEDIUM|Clerk#000000335|0|ng asymptotes. pinto beans sleep care|
+5504|19|F|41492.25|1993-01-06|2-HIGH|Clerk#000000221|0|y pending packages. furiousl|
+5505|95|O|147329.51|1997-10-04|5-LOW|Clerk#000000719|0| final, regular packages according to the slyly ironic accounts nag ironica|
+5506|91|F|8413.31|1993-11-08|1-URGENT|Clerk#000000292|0|nusual theodolites. sly|
+5507|2|O|140363.70|1998-05-28|5-LOW|Clerk#000000692|0|the carefully ironic instructions are quickly iro|
+5508|56|O|3808.05|1996-06-21|1-URGENT|Clerk#000000128|0|y express packages cajole furiously. slyly unusual requests |
+5509|80|F|135335.96|1994-04-08|5-LOW|Clerk#000000164|0|usual deposits use packages. furiously final requests wake slyly about th|
+5510|37|F|126948.81|1993-01-08|3-MEDIUM|Clerk#000000819|0| nag slyly. carefully eve|
+5511|79|F|151089.96|1994-11-29|1-URGENT|Clerk#000000438|0|ng instructions integrate fluffily among the fluffily silent accounts. bli|
+5536|116|O|108196.56|1998-03-16|4-NOT SPECIFIED|Clerk#000000076|0| carefully final dolphins. ironic, ironic deposits lose. bold, |
+5537|118|O|102207.20|1996-10-03|3-MEDIUM|Clerk#000000742|0|ng to the daring, final |
+5538|139|F|90981.28|1993-12-25|1-URGENT|Clerk#000000992|0|ttainments. slyly final ideas are about the furiously silent excuses.|
+5539|119|F|39397.60|1994-07-31|5-LOW|Clerk#000000675|0|structions. slyly regular patterns solve above the carefully expres|
+5540|130|O|90707.58|1996-10-12|4-NOT SPECIFIED|Clerk#000000120|0|y ironic packages cajole blithely|
+5541|143|O|37526.68|1997-09-30|3-MEDIUM|Clerk#000000217|0|encies among the silent accounts sleep slyly quickly pending deposits|
+5542|49|O|6402.41|1996-04-20|4-NOT SPECIFIED|Clerk#000000100|0|riously among the regularly regular pac|
+5543|115|F|118201.53|1993-09-25|3-MEDIUM|Clerk#000000644|0|ckly regular epitaphs. carefully bold accounts haggle furiously|
+5568|31|O|105421.09|1995-06-07|3-MEDIUM|Clerk#000000491|0| nag. fluffily pending de|
+5569|109|F|126113.32|1993-04-30|4-NOT SPECIFIED|Clerk#000000759|0|e regular dependencies. furiously unusual ideas b|
+5570|112|O|78567.55|1996-08-12|2-HIGH|Clerk#000000795|0|eans. ironic, even requests doze |
+5571|103|F|79248.35|1992-12-19|4-NOT SPECIFIED|Clerk#000000184|0|ts cajole furiously carefully regular sheaves. un|
+5572|8|F|182966.39|1994-07-17|2-HIGH|Clerk#000000163|0|e fluffily express deposits cajole slyly across th|
+5573|37|O|158479.37|1996-08-15|3-MEDIUM|Clerk#000000055|0|lites. slyly final pinto beans about the carefully regul|
+5574|28|F|129803.03|1992-03-10|4-NOT SPECIFIED|Clerk#000000002|0|n deposits. special, regular t|
+5575|103|O|51839.94|1995-07-24|5-LOW|Clerk#000000948|0|uriously express frays breach|
+5600|95|O|53649.35|1997-02-08|4-NOT SPECIFIED|Clerk#000000019|0|lly regular deposits. car|
+5601|11|F|118570.79|1992-01-06|2-HIGH|Clerk#000000827|0|gular deposits wake platelets? blithe|
+5602|130|O|67979.49|1997-07-30|3-MEDIUM|Clerk#000000395|0|onic asymptotes haggl|
+5603|71|F|145100.47|1992-06-20|4-NOT SPECIFIED|Clerk#000000535|0| asymptotes. fluffily ironic instructions are. pending pinto bean|
+5604|46|O|98987.51|1998-04-14|4-NOT SPECIFIED|Clerk#000000123|0|ously across the blithely ironic pinto beans. sile|
+5605|35|O|172899.84|1996-08-22|2-HIGH|Clerk#000000538|0|sleep carefully final packages. dependencies wake slyly. theodol|
+5606|149|O|219959.08|1996-11-12|5-LOW|Clerk#000000688|0|uriously express pinto beans. packages sh|
+5607|92|F|24660.06|1992-01-01|4-NOT SPECIFIED|Clerk#000000137|0|c requests promise quickly fluffily ironic deposits. caref|
+5632|79|O|89503.11|1996-02-05|1-URGENT|Clerk#000000508|0|ons. blithely pending pinto beans thrash. furiously busy theodoli|
+5633|79|O|207119.83|1998-05-31|3-MEDIUM|Clerk#000000841|0|cial deposits wake final, final|
+5634|68|O|99494.67|1996-07-31|3-MEDIUM|Clerk#000000915|0|out the accounts. carefully ironic ideas are slyly. sheaves could h|
+5635|70|F|192217.86|1992-08-16|3-MEDIUM|Clerk#000000734|0|nal platelets sleep daringly. idle, final accounts about |
+5636|122|F|143350.75|1995-02-16|3-MEDIUM|Clerk#000000916|0|. boldly even Tiresias sleep. blithely ironic packages among the ca|
+5637|103|O|128776.90|1996-06-17|3-MEDIUM|Clerk#000000183|0|nic dolphins are regular packages. ironic pinto beans hagg|
+5638|109|F|79197.77|1994-01-17|1-URGENT|Clerk#000000355|0|enly bold deposits eat. special realms play against the regular, speci|
+5639|145|F|9669.46|1994-06-02|3-MEDIUM|Clerk#000000005|0|ending packages use after the blithely regular accounts. regular package|
+5664|119|O|186215.81|1998-07-23|2-HIGH|Clerk#000000789|0|the quickly ironic dolp|
+5665|100|F|129821.09|1993-06-28|4-NOT SPECIFIED|Clerk#000000513|0| carefully special instructions. ironic pinto beans nag slyly blithe|
+5666|14|F|121663.68|1994-02-02|2-HIGH|Clerk#000000396|0|mptotes. quickly final instructions are |
+5667|44|O|37301.25|1995-08-10|1-URGENT|Clerk#000000358|0|s print upon the quickly ironic packa|
+5668|109|F|13679.32|1995-03-22|4-NOT SPECIFIED|Clerk#000000047|0|p slyly slyly express accoun|
+5669|74|O|113156.30|1996-05-06|1-URGENT|Clerk#000000336|0|ng packages nag fluffily furio|
+5670|7|F|101429.61|1993-04-21|5-LOW|Clerk#000000922|0|he carefully final packages. deposits are slyly among the requests. |
+5671|43|O|176647.54|1998-02-06|2-HIGH|Clerk#000000838|0|k dependencies. slyly |
+5696|142|P|198723.30|1995-05-04|1-URGENT|Clerk#000000447|0|e quickly unusual pack|
+5697|55|F|99177.69|1992-10-05|1-URGENT|Clerk#000000112|0|pendencies impress furiously. bold, final requests solve ab|
+5698|95|F|154936.43|1994-05-21|3-MEDIUM|Clerk#000000455|0|he furiously silent accounts haggle blithely against the carefully unusual|
+5699|142|F|226314.91|1992-07-30|5-LOW|Clerk#000000311|0|o beans. ironic asymptotes boost. blithe, final courts integrate|
+5700|143|O|79901.18|1997-12-25|1-URGENT|Clerk#000000618|0|ly pending dolphins sleep carefully slyly pending i|
+5701|43|O|16689.19|1997-02-07|5-LOW|Clerk#000000798|0| blithely final pinto beans. blit|
+5702|97|F|153024.28|1993-09-07|4-NOT SPECIFIED|Clerk#000000743|0|ironic accounts. final accounts wake express deposits. final pac|
+5703|121|F|1816.28|1993-05-16|3-MEDIUM|Clerk#000000647|0|ly special instructions. slyly even reque|
+5728|80|F|85397.04|1994-12-11|4-NOT SPECIFIED|Clerk#000000426|0|furiously express pin|
+5729|44|F|88080.33|1994-10-10|2-HIGH|Clerk#000000843|0|uffily sly accounts about|
+5730|11|O|10934.84|1997-12-18|1-URGENT|Clerk#000000181|0|l platelets. ironic pinto beans wake slyly. quickly b|
+5731|8|O|57823.37|1997-05-17|5-LOW|Clerk#000000841|0| silent excuses among the express accounts wake |
+5732|37|O|28330.42|1997-08-03|1-URGENT|Clerk#000000910|0|he quickly bold asymptotes: final platelets wake quickly. blithely final pinto|
+5733|101|F|38545.97|1993-03-17|2-HIGH|Clerk#000000873|0|osits. pending accounts boost quickly. furiously permanent acco|
+5734|94|O|45860.94|1997-10-12|3-MEDIUM|Clerk#000000084|0|efully even braids detect blithely alo|
+5735|40|F|39358.51|1994-12-11|3-MEDIUM|Clerk#000000600|0| bold realms cajole slyly fu|
+5760|25|F|59404.77|1994-05-25|4-NOT SPECIFIED|Clerk#000000498|0|s among the blithely regular frays haggle ironically bold theodolites. al|
+5761|16|O|130345.90|1998-07-06|3-MEDIUM|Clerk#000000208|0|s asymptotes cajole boldly. regular, |
+5762|49|O|165019.32|1997-02-14|1-URGENT|Clerk#000000901|0|ly bold packages: slyly ironic deposits sleep quietly foxes. express a|
+5763|8|O|140838.11|1998-06-26|4-NOT SPECIFIED|Clerk#000000633|0|according to the furiously regular pinto beans. even accounts wake fu|
+5764|131|F|53212.95|1993-10-03|4-NOT SPECIFIED|Clerk#000000363|0| furiously regular deposits haggle fluffily around th|
+5765|52|F|249900.42|1994-12-15|5-LOW|Clerk#000000959|0|longside of the quickly final packages. instructions so|
+5766|49|F|47940.51|1993-09-27|5-LOW|Clerk#000000753|0|. quickly final packages print slyly. fu|
+5767|118|F|135643.87|1992-04-29|2-HIGH|Clerk#000000225|0|ts wake fluffily above the r|
+5792|26|F|158991.89|1993-04-04|2-HIGH|Clerk#000000731|0|packages. doggedly bold deposits integrate furiously across the|
+5793|37|O|119887.47|1997-07-13|2-HIGH|Clerk#000000294|0|thely. fluffily even instructi|
+5794|8|F|122823.78|1993-04-05|5-LOW|Clerk#000000855|0|t accounts kindle about the gifts. as|
+5795|37|F|35514.45|1992-05-05|2-HIGH|Clerk#000000581|0| even instructions x-ray ironic req|
+5796|149|O|23280.61|1996-01-23|3-MEDIUM|Clerk#000000326|0|eodolites. slyly ironic pinto beans at the silent, special request|
+5797|122|O|15313.61|1997-10-15|4-NOT SPECIFIED|Clerk#000000381|0|ng! packages against the blithely b|
+5798|106|O|125011.92|1998-03-30|5-LOW|Clerk#000000343|0|lent accounts affix quickly! platelets run slyly slyly final packages. f|
+5799|26|O|71381.21|1995-08-03|1-URGENT|Clerk#000000238|0| unusual deposits sleep blithely along the carefully even requests. care|
+5824|56|O|169107.85|1996-12-03|2-HIGH|Clerk#000000171|0|unusual packages. even ideas along the even requests are along th|
+5825|61|F|23020.62|1995-02-21|5-LOW|Clerk#000000494|0|regular packages use bravely.|
+5826|22|O|21119.86|1998-06-13|1-URGENT|Clerk#000000087|0|even, regular dependenc|
+5827|31|O|137297.71|1998-07-23|3-MEDIUM|Clerk#000000660|0|hely furiously blithe dolphins. slyly |
+5828|127|F|62172.34|1994-03-06|5-LOW|Clerk#000000377|0|ages boost never during the final packa|
+5829|125|O|183734.56|1997-01-11|1-URGENT|Clerk#000000196|0|gular accounts. bold accounts are blithely furiously ironic r|
+5830|85|F|28223.57|1993-03-25|3-MEDIUM|Clerk#000000233|0|lites haggle. ironic, ironic instructions maintain blit|
+5831|139|O|113505.19|1996-11-17|5-LOW|Clerk#000000585|0|s final, final pinto beans. unusual depos|
+5856|37|F|71460.49|1994-11-06|2-HIGH|Clerk#000000634|0|special excuses. slyly final theodolites cajole blithely furiou|
+5857|124|O|158345.31|1997-11-06|4-NOT SPECIFIED|Clerk#000000267|0|gage blithely. quickly special ac|
+5858|64|F|181320.50|1992-07-14|4-NOT SPECIFIED|Clerk#000000580|0|lyly pending dugouts believe through the ironic deposits. silent s|
+5859|5|O|210643.96|1997-04-23|1-URGENT|Clerk#000000993|0|requests boost. asymptotes across the deposits solve slyly furiously pendin|
+5860|13|F|9495.28|1992-02-20|4-NOT SPECIFIED|Clerk#000000079|0| beans. bold, special foxes sleep about the ir|
+5861|139|O|41450.19|1997-04-10|3-MEDIUM|Clerk#000000094|0|rthogs cajole slyly. express packages sleep blithely final |
+5862|64|O|30550.90|1997-02-20|1-URGENT|Clerk#000000039|0|leep beneath the quickly busy excuses. ironic theodolit|
+5863|65|F|67941.54|1993-11-22|3-MEDIUM|Clerk#000000774|0|ets about the slyly pending ideas sleep according to the blithely |
+5888|46|O|67167.19|1996-09-28|3-MEDIUM|Clerk#000000748|0|quickly against the furiously final requests. evenly fi|
+5889|22|O|15417.57|1995-05-23|5-LOW|Clerk#000000690|0|ites wake across the slyly ironic|
+5890|49|F|41162.24|1992-11-04|2-HIGH|Clerk#000000013|0|packages. final, final reques|
+5891|46|F|41760.00|1992-12-29|3-MEDIUM|Clerk#000000302|0|ounts haggle furiously abo|
+5892|101|P|92340.77|1995-05-09|5-LOW|Clerk#000000639|0| pending instruction|
+5893|2|F|44777.63|1992-07-08|4-NOT SPECIFIED|Clerk#000000560|0|final sentiments. instructions boost above the never speci|
+5894|71|F|70377.31|1994-08-13|2-HIGH|Clerk#000000776|0|regular deposits wake|
+5895|64|O|201419.83|1997-01-01|4-NOT SPECIFIED|Clerk#000000747|0| ironic, unusual requests cajole blithely special, special deposits. s|
+5920|119|F|142767.26|1994-11-20|2-HIGH|Clerk#000000081|0|ns: even ideas cajole slyly among the packages. never ironic patterns|
+5921|58|F|152940.00|1994-04-07|5-LOW|Clerk#000000125|0|kly special requests breach.|
+5922|143|O|142494.99|1996-11-14|5-LOW|Clerk#000000625|0| ironic instructions haggle furiously blithely regular accounts: even platele|
+5923|101|O|157968.27|1997-05-27|2-HIGH|Clerk#000000304|0|o beans haggle slyly above the regular, even dependencies|
+5924|31|O|106823.97|1995-10-10|3-MEDIUM|Clerk#000000433|0|arefully after the pains. blithely ironic pinto |
+5925|146|O|242588.87|1995-11-13|5-LOW|Clerk#000000602|0|ourts. boldly regular foxes might sleep. slyly express tithes against |
+5926|76|F|105770.53|1994-05-20|5-LOW|Clerk#000000071|0| carefully after the furiously even re|
+5927|116|O|84983.90|1997-08-28|4-NOT SPECIFIED|Clerk#000000972|0|endencies according to the slyly ironic foxes detect furiously about the furio|
+5952|148|O|128624.99|1997-04-14|3-MEDIUM|Clerk#000000950|0| regular, final pla|
+5953|7|F|95312.81|1992-03-28|1-URGENT|Clerk#000000049|0|ages are furiously. slowly bold requests|
+5954|28|F|167262.34|1992-12-03|1-URGENT|Clerk#000000968|0|requests along the blith|
+5955|94|P|67944.38|1995-03-27|5-LOW|Clerk#000000340|0|deas integrate. fluffily regular pa|
+5956|22|O|118036.54|1998-05-18|1-URGENT|Clerk#000000587|0|le even, express platelets.|
+5957|89|F|230949.45|1993-12-27|2-HIGH|Clerk#000000020|0| dependencies are slyly. bold accounts according to the carefully regular r|
+5958|115|O|145060.41|1995-09-16|3-MEDIUM|Clerk#000000787|0|e final requests detect alongside of the qu|
+5959|23|F|195515.26|1992-05-15|3-MEDIUM|Clerk#000000913|0|into beans use ironic, unusual foxes. carefully regular excuses boost caref|
+5984|70|F|83413.30|1994-06-18|5-LOW|Clerk#000000023|0|ickly final pains haggle along the furiously ironic pinto bea|
+5985|143|F|3942.73|1995-01-12|3-MEDIUM|Clerk#000000417|0|as nag fluffily slyly permanent accounts. regular depo|
+5986|115|F|92187.80|1992-04-22|2-HIGH|Clerk#000000674|0|iously unusual notornis are |
+5987|64|O|98956.82|1996-08-03|1-URGENT|Clerk#000000464|0| ideas. quietly final accounts haggle blithely pending escapade|
+5988|31|F|41655.51|1993-11-22|4-NOT SPECIFIED|Clerk#000000867|0|fully express accounts. final pi|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/part.tbl b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/part.tbl
new file mode 100644
index 0000000..f58926e
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/part.tbl
@@ -0,0 +1,200 @@
+1|goldenrod lavender spring chocolate lace|Manufacturer#1|Brand#13|PROMO BURNISHED COPPER|7|JUMBO PKG|901.00|ly. slyly ironi|
+2|blush thistle blue yellow saddle|Manufacturer#1|Brand#13|LARGE BRUSHED BRASS|1|LG CASE|902.00|lar accounts amo|
+3|spring green yellow purple cornsilk|Manufacturer#4|Brand#42|STANDARD POLISHED BRASS|21|WRAP CASE|903.00|egular deposits hag|
+4|cornflower chocolate smoke green pink|Manufacturer#3|Brand#34|SMALL PLATED BRASS|14|MED DRUM|904.00|p furiously r|
+5|forest brown coral puff cream|Manufacturer#3|Brand#32|STANDARD POLISHED TIN|15|SM PKG|905.00| wake carefully |
+6|bisque cornflower lawn forest magenta|Manufacturer#2|Brand#24|PROMO PLATED STEEL|4|MED BAG|906.00|sual a|
+7|moccasin green thistle khaki floral|Manufacturer#1|Brand#11|SMALL PLATED COPPER|45|SM BAG|907.00|lyly. ex|
+8|misty lace thistle snow royal|Manufacturer#4|Brand#44|PROMO BURNISHED TIN|41|LG DRUM|908.00|eposi|
+9|thistle dim navajo dark gainsboro|Manufacturer#4|Brand#43|SMALL BURNISHED STEEL|12|WRAP CASE|909.00|ironic foxe|
+10|linen pink saddle puff powder|Manufacturer#5|Brand#54|LARGE BURNISHED STEEL|44|LG CAN|910.01|ithely final deposit|
+11|spring maroon seashell almond orchid|Manufacturer#2|Brand#25|STANDARD BURNISHED NICKEL|43|WRAP BOX|911.01|ng gr|
+12|cornflower wheat orange maroon ghost|Manufacturer#3|Brand#33|MEDIUM ANODIZED STEEL|25|JUMBO CASE|912.01| quickly|
+13|ghost olive orange rosy thistle|Manufacturer#5|Brand#55|MEDIUM BURNISHED NICKEL|1|JUMBO PACK|913.01|osits.|
+14|khaki seashell rose cornsilk navajo|Manufacturer#1|Brand#13|SMALL POLISHED STEEL|28|JUMBO BOX|914.01|kages c|
+15|blanched honeydew sky turquoise medium|Manufacturer#1|Brand#15|LARGE ANODIZED BRASS|45|LG CASE|915.01|usual ac|
+16|deep sky turquoise drab peach|Manufacturer#3|Brand#32|PROMO PLATED TIN|2|MED PACK|916.01|unts a|
+17|indian navy coral pink deep|Manufacturer#4|Brand#43|ECONOMY BRUSHED STEEL|16|LG BOX|917.01| regular accounts|
+18|turquoise indian lemon lavender misty|Manufacturer#1|Brand#11|SMALL BURNISHED STEEL|42|JUMBO PACK|918.01|s cajole slyly a|
+19|chocolate navy tan deep brown|Manufacturer#2|Brand#23|SMALL ANODIZED NICKEL|33|WRAP BOX|919.01| pending acc|
+20|ivory navy honeydew sandy midnight|Manufacturer#1|Brand#12|LARGE POLISHED NICKEL|48|MED BAG|920.02|are across the asympt|
+21|lemon floral azure frosted lime|Manufacturer#3|Brand#33|SMALL BURNISHED TIN|31|MED BAG|921.02|ss packages. pendin|
+22|medium forest blue ghost black|Manufacturer#4|Brand#43|PROMO POLISHED BRASS|19|LG DRUM|922.02| even p|
+23|coral lavender seashell rosy burlywood|Manufacturer#3|Brand#35|MEDIUM BURNISHED TIN|42|JUMBO JAR|923.02|nic, fina|
+24|seashell coral metallic midnight floral|Manufacturer#5|Brand#52|MEDIUM PLATED STEEL|20|MED CASE|924.02| final the|
+25|aquamarine steel firebrick light turquoise|Manufacturer#5|Brand#55|STANDARD BRUSHED COPPER|3|JUMBO BAG|925.02|requests wake|
+26|beige frosted moccasin chocolate snow|Manufacturer#3|Brand#32|SMALL BRUSHED STEEL|32|SM CASE|926.02| instructions i|
+27|saddle puff beige linen yellow|Manufacturer#1|Brand#14|LARGE ANODIZED TIN|20|MED PKG|927.02|s wake. ir|
+28|navajo yellow drab white misty|Manufacturer#4|Brand#44|SMALL PLATED COPPER|19|JUMBO PKG|928.02|x-ray pending, iron|
+29|lemon sky grey salmon orchid|Manufacturer#3|Brand#33|PROMO PLATED COPPER|7|LG DRUM|929.02| carefully fluffi|
+30|cream misty steel spring medium|Manufacturer#4|Brand#42|PROMO ANODIZED TIN|17|LG BOX|930.03|carefully bus|
+31|slate seashell steel medium moccasin|Manufacturer#5|Brand#53|STANDARD BRUSHED TIN|10|LG BAG|931.03|uriously s|
+32|sandy wheat coral spring burnished|Manufacturer#4|Brand#42|ECONOMY PLATED BRASS|31|LG CASE|932.03|urts. carefully fin|
+33|spring bisque salmon slate pink|Manufacturer#2|Brand#22|ECONOMY PLATED NICKEL|16|LG PKG|933.03|ly eve|
+34|khaki steel rose ghost salmon|Manufacturer#1|Brand#13|LARGE BRUSHED STEEL|8|JUMBO BOX|934.03|riously ironic|
+35|green blush tomato burlywood seashell|Manufacturer#4|Brand#43|MEDIUM ANODIZED BRASS|14|JUMBO PACK|935.03|e carefully furi|
+36|chiffon tan forest moccasin dark|Manufacturer#2|Brand#25|SMALL BURNISHED COPPER|3|JUMBO CAN|936.03|olites o|
+37|royal coral orange burnished navajo|Manufacturer#4|Brand#45|LARGE POLISHED TIN|48|JUMBO BOX|937.03|silent |
+38|seashell papaya white mint brown|Manufacturer#4|Brand#43|ECONOMY ANODIZED BRASS|11|SM JAR|938.03|structions inte|
+39|rose medium floral salmon powder|Manufacturer#5|Brand#53|SMALL POLISHED TIN|43|JUMBO JAR|939.03|se slowly above the fl|
+40|lemon midnight metallic sienna steel|Manufacturer#2|Brand#25|ECONOMY BURNISHED COPPER|27|SM CASE|940.04|! blithely specia|
+41|burlywood goldenrod pink peru sienna|Manufacturer#2|Brand#23|ECONOMY ANODIZED TIN|7|WRAP JAR|941.04|uriously. furiously cl|
+42|midnight turquoise lawn beige thistle|Manufacturer#5|Brand#52|MEDIUM BURNISHED TIN|45|LG BOX|942.04|the slow|
+43|medium lace midnight royal chartreuse|Manufacturer#4|Brand#44|PROMO POLISHED STEEL|5|WRAP CASE|943.04|e slyly along the ir|
+44|saddle cream wheat lemon burnished|Manufacturer#4|Brand#45|MEDIUM PLATED TIN|48|SM PACK|944.04|pinto beans. carefully|
+45|lawn peru ghost khaki maroon|Manufacturer#4|Brand#43|SMALL BRUSHED NICKEL|9|WRAP BAG|945.04|nts bo|
+46|honeydew turquoise aquamarine spring tan|Manufacturer#1|Brand#11|STANDARD POLISHED TIN|45|WRAP CASE|946.04|the blithely unusual |
+47|honeydew red azure magenta brown|Manufacturer#4|Brand#45|LARGE BURNISHED BRASS|14|JUMBO PACK|947.04| even plate|
+48|slate thistle cornsilk pale forest|Manufacturer#5|Brand#53|STANDARD BRUSHED STEEL|27|JUMBO CASE|948.04|ng to the depo|
+49|light firebrick cyan puff blue|Manufacturer#2|Brand#24|SMALL BURNISHED TIN|31|MED DRUM|949.04|ar pack|
+50|linen blanched tomato slate medium|Manufacturer#3|Brand#33|LARGE ANODIZED TIN|25|WRAP PKG|950.05|kages m|
+51|lime frosted indian dodger linen|Manufacturer#4|Brand#45|ECONOMY BURNISHED NICKEL|34|JUMBO PACK|951.05|n foxes|
+52|lemon midnight lace sky deep|Manufacturer#3|Brand#35|STANDARD BURNISHED TIN|25|WRAP CASE|952.05| final deposits. fu|
+53|bisque rose cornsilk seashell purple|Manufacturer#2|Brand#23|ECONOMY BURNISHED NICKEL|32|MED BAG|953.05|mptot|
+54|blanched mint yellow papaya cyan|Manufacturer#2|Brand#21|LARGE BURNISHED COPPER|19|WRAP CASE|954.05|e blithely|
+55|sky cream deep tomato rosy|Manufacturer#2|Brand#23|ECONOMY BRUSHED COPPER|9|MED BAG|955.05|ly final pac|
+56|antique beige brown deep dodger|Manufacturer#1|Brand#12|MEDIUM PLATED STEEL|20|WRAP DRUM|956.05|ts. blithel|
+57|purple blue light sienna deep|Manufacturer#3|Brand#32|MEDIUM BURNISHED BRASS|49|MED PKG|957.05|lly abov|
+58|linen hot cornsilk drab bisque|Manufacturer#5|Brand#53|STANDARD POLISHED TIN|44|LG PACK|958.05| fluffily blithely reg|
+59|misty brown medium mint salmon|Manufacturer#5|Brand#53|MEDIUM POLISHED TIN|2|LG BAG|959.05|regular exc|
+60|snow spring sandy olive tomato|Manufacturer#1|Brand#11|LARGE POLISHED COPPER|27|JUMBO CASE|960.06| integ|
+61|light tan linen tomato peach|Manufacturer#5|Brand#54|SMALL BURNISHED NICKEL|18|WRAP DRUM|961.06|es. blithely en|
+62|tan cornsilk spring grey chocolate|Manufacturer#3|Brand#35|STANDARD BRUSHED BRASS|39|JUMBO BOX|962.06|ckly across the carefu|
+63|burnished puff coral light papaya|Manufacturer#3|Brand#32|STANDARD BURNISHED NICKEL|10|JUMBO CAN|963.06| quickly |
+64|aquamarine coral lemon ivory gainsboro|Manufacturer#2|Brand#21|MEDIUM ANODIZED BRASS|1|JUMBO CAN|964.06|efully regular pi|
+65|slate drab medium puff gainsboro|Manufacturer#5|Brand#53|MEDIUM BRUSHED COPPER|3|MED CAN|965.06|posits after the quic|
+66|cornflower pale almond lemon linen|Manufacturer#3|Brand#35|PROMO ANODIZED NICKEL|46|SM CASE|966.06|haggle blithely iro|
+67|slate salmon rose spring seashell|Manufacturer#2|Brand#21|SMALL BRUSHED TIN|31|WRAP DRUM|967.06| regular, p|
+68|bisque ivory mint purple almond|Manufacturer#1|Brand#11|PROMO ANODIZED STEEL|10|WRAP BOX|968.06|eposits shall h|
+69|lace burnished rosy antique metallic|Manufacturer#5|Brand#52|MEDIUM POLISHED BRASS|2|SM BOX|969.06|ely final depo|
+70|violet seashell firebrick dark navajo|Manufacturer#1|Brand#11|STANDARD BRUSHED STEEL|42|LG PACK|970.07|inal gifts. sl|
+71|violet firebrick cream peru white|Manufacturer#3|Brand#33|STANDARD PLATED BRASS|26|WRAP DRUM|971.07| packages alongside|
+72|hot spring yellow azure dodger|Manufacturer#2|Brand#23|STANDARD ANODIZED TIN|25|JUMBO PACK|972.07|efully final the|
+73|cream moccasin royal dim chiffon|Manufacturer#2|Brand#21|SMALL BRUSHED COPPER|35|WRAP DRUM|973.07|ts haggl|
+74|frosted grey aquamarine thistle papaya|Manufacturer#5|Brand#55|ECONOMY ANODIZED BRASS|25|JUMBO CASE|974.07|ent foxes|
+75|aquamarine maroon wheat salmon metallic|Manufacturer#3|Brand#35|SMALL BURNISHED NICKEL|39|SM JAR|975.07|s sleep furiou|
+76|rosy light lime puff sandy|Manufacturer#3|Brand#34|MEDIUM BRUSHED COPPER|9|SM PKG|976.07|n accounts sleep qu|
+77|mint bisque chiffon snow firebrick|Manufacturer#5|Brand#52|STANDARD BRUSHED COPPER|13|MED PKG|977.07|uests.|
+78|blush forest slate seashell puff|Manufacturer#1|Brand#14|ECONOMY POLISHED STEEL|24|LG JAR|978.07|icing deposits wake|
+79|gainsboro pink grey tan almond|Manufacturer#4|Brand#45|PROMO ANODIZED BRASS|22|JUMBO BAG|979.07| foxes are slyly regu|
+80|tomato chartreuse coral turquoise linen|Manufacturer#4|Brand#44|PROMO PLATED BRASS|28|MED CAN|980.08|unusual dependencies i|
+81|misty sandy cornsilk dodger blush|Manufacturer#5|Brand#53|ECONOMY BRUSHED TIN|21|MED BAG|981.08|ove the furiou|
+82|khaki tomato purple almond tan|Manufacturer#1|Brand#15|ECONOMY POLISHED TIN|12|WRAP BOX|982.08|ial requests haggle |
+83|blush green dim lawn peru|Manufacturer#1|Brand#12|PROMO BURNISHED NICKEL|47|SM CAN|983.08|ly regul|
+84|salmon floral cream rose dark|Manufacturer#4|Brand#45|SMALL ANODIZED NICKEL|26|JUMBO PACK|984.08|ideas nag|
+85|dim deep aquamarine smoke pale|Manufacturer#5|Brand#55|PROMO ANODIZED NICKEL|16|LG BAG|985.08| silent|
+86|green blanched firebrick dim cream|Manufacturer#4|Brand#44|STANDARD PLATED TIN|37|LG CASE|986.08| daring sheaves |
+87|purple lace seashell antique orange|Manufacturer#4|Brand#41|LARGE PLATED STEEL|41|WRAP PACK|987.08|yly final|
+88|lime orange bisque chartreuse lemon|Manufacturer#4|Brand#44|PROMO PLATED COPPER|16|SM CASE|988.08|e regular packages. |
+89|ghost lace lemon sienna saddle|Manufacturer#5|Brand#53|STANDARD BURNISHED STEEL|7|MED JAR|989.08|y final pinto |
+90|hot rosy violet plum pale|Manufacturer#5|Brand#51|ECONOMY POLISHED STEEL|49|JUMBO CAN|990.09|caref|
+91|misty bisque lavender spring turquoise|Manufacturer#2|Brand#21|STANDARD BRUSHED TIN|32|JUMBO PKG|991.09|counts dete|
+92|blush magenta ghost tomato rose|Manufacturer#2|Brand#22|STANDARD ANODIZED TIN|35|JUMBO PKG|992.09|he ironic accounts. sp|
+93|pale yellow cornsilk dodger moccasin|Manufacturer#2|Brand#24|LARGE ANODIZED TIN|2|WRAP DRUM|993.09| platel|
+94|blanched pink frosted mint snow|Manufacturer#3|Brand#35|STANDARD POLISHED BRASS|32|SM BOX|994.09|s accounts cajo|
+95|dodger beige wheat orchid navy|Manufacturer#3|Brand#33|LARGE BRUSHED TIN|36|WRAP DRUM|995.09| final pinto beans |
+96|chocolate light firebrick rose indian|Manufacturer#5|Brand#53|STANDARD BRUSHED STEEL|32|SM CASE|996.09|ng to the bli|
+97|coral dodger beige black chartreuse|Manufacturer#3|Brand#33|MEDIUM POLISHED BRASS|49|WRAP CAN|997.09|ss excuses sleep am|
+98|frosted peru chiffon yellow aquamarine|Manufacturer#5|Brand#54|STANDARD ANODIZED BRASS|22|MED JAR|998.09|e the q|
+99|mint grey purple sienna metallic|Manufacturer#2|Brand#21|SMALL BURNISHED STEEL|11|JUMBO PKG|999.09|press|
+100|cyan orchid indian cornflower saddle|Manufacturer#3|Brand#33|ECONOMY ANODIZED TIN|4|LG BAG|1000.10|of the steal|
+101|powder deep lavender violet gainsboro|Manufacturer#3|Brand#32|LARGE ANODIZED STEEL|26|JUMBO JAR|1001.10|ly even,|
+102|papaya maroon blush powder sky|Manufacturer#3|Brand#31|MEDIUM BURNISHED BRASS|17|SM DRUM|1002.10|ular packa|
+103|navy sky spring orchid forest|Manufacturer#2|Brand#25|MEDIUM PLATED BRASS|45|WRAP DRUM|1003.10|e blithely blith|
+104|plum cyan cornflower midnight royal|Manufacturer#1|Brand#13|MEDIUM ANODIZED STEEL|36|JUMBO BAG|1004.10|ites sleep quickly|
+105|dodger slate pale mint navajo|Manufacturer#1|Brand#15|SMALL POLISHED COPPER|27|LG DRUM|1005.10|odolites was |
+106|cornsilk bisque seashell lemon frosted|Manufacturer#3|Brand#31|MEDIUM PLATED BRASS|28|WRAP DRUM|1006.10|unts maintain |
+107|violet honeydew bisque sienna orchid|Manufacturer#5|Brand#53|SMALL BURNISHED TIN|12|MED BOX|1007.10|slyly special depos|
+108|bisque peach magenta tomato yellow|Manufacturer#1|Brand#12|PROMO PLATED NICKEL|41|MED PKG|1008.10|after the carefully |
+109|lemon black indian cornflower pale|Manufacturer#3|Brand#33|ECONOMY POLISHED TIN|11|LG PACK|1009.10|instruction|
+110|firebrick navy rose beige black|Manufacturer#3|Brand#33|STANDARD BURNISHED COPPER|46|LG DRUM|1010.11|t quickly a|
+111|orange cornflower mint snow peach|Manufacturer#5|Brand#54|LARGE BRUSHED COPPER|28|JUMBO JAR|1011.11|kly bold epitaphs |
+112|hot aquamarine tomato lace indian|Manufacturer#4|Brand#43|PROMO BRUSHED STEEL|42|JUMBO CAN|1012.11|the express, |
+113|almond seashell azure blanched light|Manufacturer#3|Brand#31|PROMO POLISHED TIN|23|LG CAN|1013.11|finally even |
+114|pink black blanched lace chartreuse|Manufacturer#5|Brand#51|MEDIUM POLISHED NICKEL|41|MED PACK|1014.11|ully final foxes. pint|
+115|spring chiffon cream orchid dodger|Manufacturer#4|Brand#45|STANDARD POLISHED STEEL|24|MED CAN|1015.11|counts nag! caref|
+116|goldenrod black slate forest red|Manufacturer#5|Brand#53|PROMO POLISHED NICKEL|33|SM PACK|1016.11|usly final courts |
+117|tomato honeydew pale red yellow|Manufacturer#1|Brand#14|SMALL BRUSHED TIN|25|LG BAG|1017.11|ages acc|
+118|ghost plum brown coral cornsilk|Manufacturer#2|Brand#25|PROMO ANODIZED TIN|31|MED PACK|1018.11|ly ironic pinto|
+119|olive metallic slate peach green|Manufacturer#4|Brand#43|LARGE POLISHED STEEL|30|WRAP CASE|1019.11|out the quickly r|
+120|pink powder mint moccasin navajo|Manufacturer#1|Brand#14|SMALL ANODIZED NICKEL|45|WRAP JAR|1020.12|lly a|
+121|bisque royal goldenrod medium thistle|Manufacturer#1|Brand#14|ECONOMY BRUSHED COPPER|13|SM PKG|1021.12|deposi|
+122|gainsboro royal forest dark lace|Manufacturer#2|Brand#21|MEDIUM ANODIZED TIN|8|LG DRUM|1022.12|sts c|
+123|deep dim peach light beige|Manufacturer#1|Brand#12|SMALL BURNISHED TIN|31|JUMBO PKG|1023.12|ray regula|
+124|wheat blush forest metallic navajo|Manufacturer#3|Brand#32|PROMO ANODIZED STEEL|1|LG BOX|1024.12|g the expr|
+125|mint ivory saddle peach midnight|Manufacturer#1|Brand#12|STANDARD BRUSHED BRASS|17|WRAP BAG|1025.12|kages against|
+126|burnished black blue metallic orchid|Manufacturer#4|Brand#45|MEDIUM BRUSHED NICKEL|4|LG BAG|1026.12|es sleep al|
+127|royal coral orchid spring sky|Manufacturer#5|Brand#52|SMALL BURNISHED NICKEL|14|LG JAR|1027.12|lithely expr|
+128|dark burlywood burnished snow sky|Manufacturer#2|Brand#22|PROMO PLATED TIN|5|SM BAG|1028.12|e of the furiously ex|
+129|grey spring chiffon thistle lime|Manufacturer#1|Brand#15|LARGE POLISHED TIN|20|SM JAR|1029.12| careful|
+130|gainsboro powder cyan pale rosy|Manufacturer#2|Brand#23|SMALL PLATED NICKEL|26|LG BOX|1030.13|ake slyly|
+131|tomato moccasin cyan brown goldenrod|Manufacturer#5|Brand#52|STANDARD ANODIZED BRASS|43|MED DRUM|1031.13|nts wake dar|
+132|seashell papaya tomato lime hot|Manufacturer#4|Brand#45|STANDARD BURNISHED BRASS|2|WRAP DRUM|1032.13|ckly expre|
+133|firebrick black dodger pink salmon|Manufacturer#1|Brand#13|SMALL BRUSHED NICKEL|19|LG PKG|1033.13| final pinto beans|
+134|steel beige mint maroon indian|Manufacturer#4|Brand#42|SMALL POLISHED STEEL|35|SM PKG|1034.13|es. bold pa|
+135|thistle chocolate ghost gainsboro peru|Manufacturer#2|Brand#21|MEDIUM BURNISHED STEEL|24|JUMBO CASE|1035.13|l frets |
+136|cornsilk maroon blanched thistle rosy|Manufacturer#2|Brand#22|SMALL PLATED STEEL|2|WRAP BAG|1036.13|kages print carefully|
+137|cornsilk drab ghost sandy royal|Manufacturer#3|Brand#31|ECONOMY PLATED STEEL|25|MED PACK|1037.13|the t|
+138|dark aquamarine tomato medium puff|Manufacturer#1|Brand#13|ECONOMY BURNISHED COPPER|42|JUMBO DRUM|1038.13|ts solve acro|
+139|floral steel burlywood navy cream|Manufacturer#3|Brand#32|MEDIUM BRUSHED STEEL|7|SM BOX|1039.13|ter t|
+140|aquamarine lavender maroon slate hot|Manufacturer#5|Brand#53|STANDARD PLATED STEEL|45|SM BOX|1040.14|oss the carefu|
+141|honeydew magenta tomato spring medium|Manufacturer#3|Brand#35|STANDARD ANODIZED STEEL|23|SM PKG|1041.14|ans nag furiously pen|
+142|chartreuse linen grey slate saddle|Manufacturer#5|Brand#55|STANDARD ANODIZED BRASS|36|MED JAR|1042.14|he accounts. pac|
+143|bisque dodger blanched steel maroon|Manufacturer#3|Brand#34|ECONOMY PLATED TIN|44|MED BAG|1043.14|nts across the|
+144|hot midnight orchid dim steel|Manufacturer#1|Brand#14|SMALL ANODIZED TIN|26|SM BOX|1044.14|owly |
+145|navajo lavender chocolate deep hot|Manufacturer#5|Brand#53|PROMO BRUSHED COPPER|24|SM BAG|1045.14|es wake furiously blit|
+146|azure smoke mint cream burlywood|Manufacturer#3|Brand#34|STANDARD BRUSHED COPPER|11|WRAP PACK|1046.14|unts cajole|
+147|honeydew orange dodger linen lace|Manufacturer#1|Brand#11|MEDIUM PLATED COPPER|29|JUMBO PKG|1047.14|wake never bold |
+148|yellow white ghost lavender salmon|Manufacturer#3|Brand#31|STANDARD PLATED STEEL|20|SM BOX|1048.14|platelets wake fu|
+149|tan thistle frosted indian lawn|Manufacturer#2|Brand#24|MEDIUM BURNISHED NICKEL|6|MED PKG|1049.14|leep requests. dog|
+150|pale rose navajo firebrick aquamarine|Manufacturer#3|Brand#35|LARGE BRUSHED TIN|21|SM BAG|1050.15|ironic foxes|
+151|chartreuse linen violet ghost thistle|Manufacturer#3|Brand#34|LARGE PLATED BRASS|45|MED CAN|1051.15|ccounts nag i|
+152|white sky antique tomato chartreuse|Manufacturer#5|Brand#53|MEDIUM POLISHED STEEL|48|MED CASE|1052.15|thely regular t|
+153|linen frosted slate coral peru|Manufacturer#1|Brand#11|STANDARD PLATED TIN|20|MED BAG|1053.15|thlessly. silen|
+154|peru moccasin peach pale spring|Manufacturer#1|Brand#11|ECONOMY ANODIZED TIN|1|JUMBO BAG|1054.15|posits |
+155|puff yellow cyan tomato purple|Manufacturer#2|Brand#21|SMALL BRUSHED NICKEL|28|WRAP CASE|1055.15|lly ironic, r|
+156|almond ghost powder blush forest|Manufacturer#4|Brand#43|SMALL POLISHED NICKEL|2|LG PKG|1056.15| pinto beans. eve|
+157|navajo linen coral brown forest|Manufacturer#1|Brand#11|ECONOMY ANODIZED STEEL|26|JUMBO PACK|1057.15|ial courts. ru|
+158|magenta light misty navy honeydew|Manufacturer#4|Brand#45|MEDIUM BURNISHED COPPER|47|LG JAR|1058.15| ideas detect slyl|
+159|white orange antique beige aquamarine|Manufacturer#4|Brand#43|SMALL ANODIZED BRASS|46|SM BAG|1059.15| ironic requests-- pe|
+160|frosted cornflower khaki salmon metallic|Manufacturer#5|Brand#55|STANDARD POLISHED COPPER|47|JUMBO CAN|1060.16|nts are carefully|
+161|metallic khaki navy forest cyan|Manufacturer#2|Brand#22|STANDARD PLATED TIN|17|SM PACK|1061.16|r the bl|
+162|burlywood cornflower aquamarine misty snow|Manufacturer#3|Brand#33|MEDIUM ANODIZED COPPER|35|JUMBO PACK|1062.16|e slyly around th|
+163|blush metallic maroon lawn forest|Manufacturer#2|Brand#21|ECONOMY PLATED TIN|34|WRAP DRUM|1063.16|nly s|
+164|orange cyan magenta navajo indian|Manufacturer#2|Brand#23|LARGE PLATED BRASS|35|JUMBO BAG|1064.16|mong th|
+165|white dim cornflower sky seashell|Manufacturer#1|Brand#15|STANDARD PLATED STEEL|24|SM CAN|1065.16| carefully fin|
+166|linen bisque tomato gainsboro goldenrod|Manufacturer#5|Brand#52|LARGE POLISHED COPPER|4|MED BAG|1066.16|ss the|
+167|almond floral grey dim sky|Manufacturer#3|Brand#32|LARGE ANODIZED STEEL|46|WRAP BOX|1067.16|ic ac|
+168|lace gainsboro burlywood smoke tomato|Manufacturer#1|Brand#13|SMALL BRUSHED COPPER|20|JUMBO DRUM|1068.16|ss package|
+169|bisque misty sky cornflower peach|Manufacturer#5|Brand#55|STANDARD POLISHED BRASS|10|JUMBO CASE|1069.16|lets alongside of|
+170|peru grey blanched goldenrod yellow|Manufacturer#3|Brand#33|LARGE POLISHED COPPER|28|LG DRUM|1070.17|yly s|
+171|beige violet black magenta chartreuse|Manufacturer#1|Brand#11|STANDARD BURNISHED COPPER|40|LG JAR|1071.17| the r|
+172|medium goldenrod linen sky coral|Manufacturer#5|Brand#53|PROMO PLATED NICKEL|28|MED CASE|1072.17|quick as|
+173|chartreuse seashell powder navy grey|Manufacturer#1|Brand#12|ECONOMY BURNISHED TIN|17|LG CASE|1073.17|sly bold excuses haggl|
+174|hot cornflower slate saddle pale|Manufacturer#1|Brand#15|ECONOMY BRUSHED COPPER|25|LG CASE|1074.17| accounts nag ab|
+175|magenta blue chartreuse tan green|Manufacturer#1|Brand#11|PROMO ANODIZED TIN|45|JUMBO JAR|1075.17|ole against the|
+176|pink drab ivory papaya grey|Manufacturer#2|Brand#24|SMALL ANODIZED STEEL|40|MED CAN|1076.17|blithely. ironic|
+177|indian turquoise purple green spring|Manufacturer#2|Brand#21|MEDIUM BRUSHED STEEL|42|LG BAG|1077.17|ermanently eve|
+178|lace blanched magenta yellow almond|Manufacturer#1|Brand#13|STANDARD POLISHED TIN|10|LG JAR|1078.17|regular instructions.|
+179|deep puff brown blue burlywood|Manufacturer#4|Brand#43|ECONOMY BRUSHED STEEL|20|LG JAR|1079.17|ely regul|
+180|seashell maroon lace burnished lavender|Manufacturer#3|Brand#33|STANDARD BURNISHED NICKEL|7|WRAP BAG|1080.18|oss the |
+181|antique plum smoke pink dodger|Manufacturer#2|Brand#24|MEDIUM PLATED STEEL|19|WRAP CAN|1081.18|al deposits |
+182|beige cyan burlywood chiffon light|Manufacturer#3|Brand#31|MEDIUM ANODIZED COPPER|11|JUMBO CAN|1082.18|bits are |
+183|ivory white burnished papaya cornflower|Manufacturer#5|Brand#52|PROMO POLISHED STEEL|35|LG PKG|1083.18|ly regular excus|
+184|ghost honeydew cyan lawn powder|Manufacturer#5|Brand#53|SMALL POLISHED TIN|42|LG BOX|1084.18|ding courts. idly iro|
+185|firebrick black ivory spring medium|Manufacturer#4|Brand#44|ECONOMY POLISHED TIN|4|WRAP BAG|1085.18|even foxe|
+186|grey purple chocolate turquoise plum|Manufacturer#2|Brand#23|ECONOMY BRUSHED TIN|15|JUMBO PKG|1086.18|ly reg|
+187|white red lace deep pale|Manufacturer#4|Brand#45|PROMO ANODIZED BRASS|45|MED CAN|1087.18|leep slyly s|
+188|moccasin steel rosy drab white|Manufacturer#5|Brand#54|ECONOMY ANODIZED BRASS|9|MED CAN|1088.18| above the silent p|
+189|dodger moccasin lemon purple thistle|Manufacturer#2|Brand#22|MEDIUM BRUSHED BRASS|13|WRAP DRUM|1089.18|en requests. sauternes|
+190|chartreuse goldenrod midnight cornflower blush|Manufacturer#5|Brand#53|LARGE BURNISHED NICKEL|23|WRAP BAG|1090.19| furiously even d|
+191|mint midnight puff forest peach|Manufacturer#3|Brand#31|MEDIUM POLISHED BRASS|36|WRAP BOX|1091.19| asymptote|
+192|thistle puff pink cream orange|Manufacturer#3|Brand#34|STANDARD BRUSHED COPPER|17|MED BAG|1092.19|uickly regular, expr|
+193|turquoise lime royal metallic azure|Manufacturer#4|Brand#45|ECONOMY BURNISHED BRASS|31|SM PKG|1093.19|final ideas wake furi|
+194|brown black cream navy plum|Manufacturer#5|Brand#51|ECONOMY POLISHED STEEL|7|SM CAN|1094.19|y special accoun|
+195|bisque sienna hot goldenrod khaki|Manufacturer#4|Brand#41|STANDARD BRUSHED NICKEL|40|MED CASE|1095.19|oxes sleep care|
+196|pale peru linen hot maroon|Manufacturer#3|Brand#33|SMALL BURNISHED NICKEL|3|JUMBO JAR|1096.19|uickly special |
+197|lawn lemon khaki rosy blue|Manufacturer#5|Brand#52|SMALL ANODIZED COPPER|18|SM JAR|1097.19|lithely after the eve|
+198|orange cornflower indian aquamarine white|Manufacturer#4|Brand#41|PROMO BRUSHED NICKEL|43|SM PACK|1098.19|ackages? carefully re|
+199|ivory slate lavender tan royal|Manufacturer#3|Brand#31|ECONOMY PLATED STEEL|23|JUMBO DRUM|1099.19|ickly regul|
+200|peach cornsilk navy rosy red|Manufacturer#5|Brand#54|MEDIUM POLISHED BRASS|22|LG PKG|1100.20|furiously even depo|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/partsupp.tbl b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/partsupp.tbl
new file mode 100644
index 0000000..d8e5856
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/partsupp.tbl
@@ -0,0 +1,800 @@
+1|2|3325|771.64|, even theodolites. regular, final theodolites eat after the carefully pending foxes. furiously regular deposits sleep slyly. carefully bold realms above the ironic dependencies haggle careful|
+1|4|8076|993.49|ven ideas. quickly even packages print. pending multipliers must have to are fluff|
+1|6|3956|337.09|after the fluffily ironic deposits? blithely special dependencies integrate furiously even excuses. blithely silent theodolites could have to haggle pending, express requests; fu|
+1|8|4069|357.84|al, regular dependencies serve carefully after the quickly final pinto beans. furiously even deposits sleep quickly final, silent pinto beans. fluffily reg|
+2|3|8895|378.49|nic accounts. final accounts sleep furiously about the ironic, bold packages. regular, regular accounts|
+2|5|4969|915.27|ptotes. quickly pending dependencies integrate furiously. fluffily ironic ideas impress blithely above the express accounts. furiously even epitaphs need to wak|
+2|7|8539|438.37|blithely bold ideas. furiously stealthy packages sleep fluffily. slyly special deposits snooze furiously carefully regular accounts. regular deposits according to the accounts nag carefully slyl|
+2|9|3025|306.39|olites. deposits wake carefully. even, express requests cajole. carefully regular ex|
+3|4|4651|920.92|ilent foxes affix furiously quickly unusual requests. even packages across the carefully even theodolites nag above the sp|
+3|6|4093|498.13|ending dependencies haggle fluffily. regular deposits boost quickly carefully regular requests. deposits affix furiously around the pinto beans. ironic, unusual platelets across the p|
+3|8|3917|645.40|of the blithely regular theodolites. final theodolites haggle blithely carefully unusual ideas. blithely even f|
+3|10|9942|191.92| unusual, ironic foxes according to the ideas detect furiously alongside of the even, express requests. blithely regular the|
+4|5|1339|113.97| carefully unusual ideas. packages use slyly. blithely final pinto beans cajole along the furiously express requests. regular orbits haggle carefully. care|
+4|7|6377|591.18|ly final courts haggle carefully regular accounts. carefully regular accounts could integrate slyly. slyly express packages about the accounts wake slyly|
+4|9|2694|51.37|g, regular deposits: quick instructions run across the carefully ironic theodolites-- final dependencies haggle into the dependencies. f|
+4|1|2480|444.37|requests sleep quickly regular accounts. theodolites detect. carefully final depths w|
+5|6|3735|255.88|arefully even requests. ironic requests cajole carefully even dolphin|
+5|8|9653|50.52|y stealthy deposits. furiously final pinto beans wake furiou|
+5|10|1329|219.83|iously regular deposits wake deposits. pending pinto beans promise ironic dependencies. even, regular pinto beans integrate|
+5|2|6925|537.98|sits. quickly fluffy packages wake quickly beyond the blithely regular requests. pending requests cajole among the final pinto beans. carefully busy theodolites affix quickly stealthily |
+6|7|8851|130.72|usly final packages. slyly ironic accounts poach across the even, sly requests. carefully pending request|
+6|9|1627|424.25| quick packages. ironic deposits print. furiously silent platelets across the carefully final requests are slyly along the furiously even instructi|
+6|1|3336|642.13|final instructions. courts wake packages. blithely unusual realms along the multipliers nag |
+6|3|6451|175.32| accounts alongside of the slyly even accounts wake carefully final instructions-- ruthless platelets wake carefully ideas. even deposits are quickly final,|
+7|8|7454|763.98|y express tithes haggle furiously even foxes. furiously ironic deposits sleep toward the furiously unusual|
+7|10|2770|149.66|hould have to nag after the blithely final asymptotes. fluffily spe|
+7|2|3377|68.77|usly against the daring asymptotes. slyly regular platelets sleep quickly blithely regular deposits. boldly regular deposits wake blithely ironic accounts|
+7|4|9460|299.58|. furiously final ideas hinder slyly among the ironic, final packages. blithely ironic dependencies cajole pending requests: blithely even packa|
+8|9|6834|249.63|lly ironic accounts solve express, unusual theodolites. special packages use quickly. quickly fin|
+8|1|396|957.34|r accounts. furiously pending dolphins use even, regular platelets. final|
+8|3|9845|220.62|s against the fluffily special packages snooze slyly slyly regular p|
+8|5|8126|916.91|final accounts around the blithely special asymptotes wake carefully beyond the bold dugouts. regular ideas haggle furiously after|
+9|10|7054|84.20|ts boost. evenly regular packages haggle after the quickly careful accounts. |
+9|2|7542|811.84|ate after the final pinto beans. express requests cajole express packages. carefully bold ideas haggle furiously. blithely express accounts eat carefully among the evenly busy accounts. carefully un|
+9|4|9583|381.31|d foxes. final, even braids sleep slyly slyly regular ideas. unusual ideas above|
+9|6|3063|291.84| the blithely ironic instructions. blithely express theodolites nag furiously. carefully bold requests shall have to use slyly pending requests. carefully regular instr|
+10|1|2952|996.12| bold foxes wake quickly even, final asymptotes. blithely even depe|
+10|3|3335|673.27|s theodolites haggle according to the fluffily unusual instructions. silent realms nag carefully ironic theodolites. furiously unusual instructions would detect fu|
+10|5|5691|164.00|r, silent instructions sleep slyly regular pinto beans. furiously unusual gifts use. silently ironic theodolites cajole final deposits! express dugouts are furiously. packages sleep |
+10|7|841|374.02|refully above the ironic packages. quickly regular packages haggle foxes. blithely ironic deposits a|
+11|2|4540|709.87|thely across the blithely unusual requests. slyly regular instructions wake slyly ironic theodolites. requests haggle blithely above the blithely brave p|
+11|5|4729|894.90|ters wake. sometimes bold packages cajole sometimes blithely final instructions. carefully ironic foxes after the furiously unusual foxes cajole carefully acr|
+11|8|3708|818.74|inal accounts nag quickly slyly special frays; bold, final theodolites play slyly after the furiously pending packages. f|
+11|1|3213|471.98|nusual, regular requests use carefully. slyly final packages haggle quickly. slyly express packages impress blithely across the blithely regular ideas. regular depe|
+12|3|3610|659.73|jole bold theodolites. final packages haggle! carefully regular deposits play furiously among the special ideas. quickly ironic packages detect quickly carefully final|
+12|6|7606|332.81|luffily regular courts engage carefully special realms. regular accounts across the blithely special pinto beans use carefully at the silent request|
+12|9|824|337.06|es are unusual deposits. fluffily even deposits across the blithely final theodolites doubt across the unusual accounts. regular, |
+12|2|5454|901.70|s across the carefully regular courts haggle fluffily among the even theodolites. blithely final platelets x-ray even ideas. fluffily express pinto beans sleep slyly. carefully even a|
+13|4|612|169.44|s. furiously even asymptotes use slyly blithely express foxes. pending courts integrate blithely among the ironic requests! blithely pending deposits integrate slyly furiously final packa|
+13|7|7268|862.70|s sleep slyly packages. final theodolites to the express packages haggle quic|
+13|10|864|38.64|s after the slyly pending instructions haggle even, express requests. permanently regular pinto beans are. slyly pending req|
+13|3|9736|327.18|tect after the express instructions. furiously silent ideas sleep blithely special ideas. attainments sleep furiously. carefully bold requests ab|
+14|5|5278|650.07|e quickly among the furiously ironic accounts. special, final sheaves against the|
+14|8|5334|889.50|ss dependencies are furiously silent excuses. blithely ironic pinto beans affix quickly according to the slyly ironic asymptotes. final packag|
+14|1|3676|893.39|sits are according to the fluffily silent asymptotes. final ideas are slyly above the regular instructions. furiousl|
+14|4|4947|310.13| final deposits boost slyly regular packages; carefully pending theodolites |
+15|6|7047|835.70|blithely quick requests sleep carefully fluffily regular pinto beans. ironic pinto beans around the slyly regular foxe|
+15|9|3336|784.55|slyly. fluffily bold accounts cajole furiously. furiously regular dependencies wak|
+15|2|3316|265.89|e express instructions. ironic requests haggle fluffily along the carefully even packages. furiously final acco|
+15|5|5255|458.67|refully bold instructions among the silent grouches must boost against the express deposits:|
+16|7|5282|709.16|lithely ironic theodolites should have to are furiously-- |
+16|10|9412|887.53|ly special accounts wake. fluffily bold ideas believe blith|
+16|3|854|781.91| unusual excuses. requests after the carefully regular pinto |
+16|6|1491|918.51|unts cajole furiously across the fluffily pending instructions. slyly special accounts could have to boost b|
+17|8|8555|995.35|are furiously final accounts. carefully unusual accounts snooze across the requests. carefully special dolphins|
+17|1|7737|648.75|e blithely express accounts. foxes kindle slyly unusual dinos. quickly special f|
+17|4|3123|555.04|ly bold accounts. regular packages use silently. quickly unusual sentiments around the quickly ironic theodolites haggle furiously pending requests. care|
+17|7|3203|64.40|bold packages nag fluffily after the regular accounts. furiously ironic asymptotes sleep quickly enticing pinto beans. carefully pending accounts use about the |
+18|9|1125|664.17|. ironic, regular accounts across the furiously express |
+18|2|8132|52.44| final packages wake quickly across the blithely ironic instructions. regular pains integrate slyly across the deposits. carefully regular pinto beans among the close|
+18|5|3133|568.61|riously bold accounts. packages boost daringly. blithely regular requests cajole. regular foxes wake carefully final accounts. blithely unusual excuses det|
+18|8|6475|386.29|. furiously regular accounts cajole slyly across the pending|
+19|10|1416|144.80|o beans. even packages nag boldly according to the bold, special deposits. ironic packages after the pinto beans nag above the quickly ironic requests. bl|
+19|3|5467|405.70|nstructions use furiously. fluffily regular excuses wake. slyly special grouches are carefully regular Tiresias. regular requests use about the quickly furio|
+19|6|8800|635.66|sual requests sleep carefully. deposits cajole carefully over the regular, regular requests. quickly unusual asymptotes use some|
+19|9|1340|346.92| requests. final, pending realms use carefully; slyly dogged foxes impress fluffily above the blithely regular deposits. ironic, regular courts wake carefully. bold requests impress|
+20|1|2927|675.54|s, ironic deposits haggle across the quickly bold asymptotes. express, ironic pinto beans wake carefully enticingly special foxes. requests are at the c|
+20|4|2723|305.84|nal, bold frets cajole slyly regular, unusual platelets. slyly permanent deposits wake carefully carefully silent accounts. even, even requests wake quickly. furiously pending packages are|
+20|7|5905|546.66|ing deposits use furiously. ironically final pinto bea|
+20|10|4271|115.89|xcuses wake at the deposits. regular pinto beans nag slyly fluffi|
+21|2|6571|944.44|ing instructions impress bold foxes. ironic pinto beans use. thinly even asymptotes cajole ironic packages. quickly ironic pinto beans detect slyly regular deposits. ruthlessly even deposits are. sl|
+21|6|1704|139.05|posits cajole; quickly even requests sleep furiously. ironic theodolites sleep pending, express instructions. stealthily even platelets cajole carefully after the final, ironic p|
+21|10|7153|664.50|blithely enticing instructions use alongside of the carefully thin deposits. blithely bold requests are fluffily|
+21|4|367|584.86|ong the even theodolites. pending, pending accounts sleep-- courts boost quickly at the accounts. quickly fin|
+22|3|4410|786.18|even accounts. final excuses try to sleep regular, even packages. carefully express dolphins cajole; furiously special pinto bea|
+22|7|9779|635.84|l instructions cajole across the blithely special deposits. blithely pending accounts use thinly slyly final requests. instructions haggle. pinto beans sleep along the slyly pen|
+22|1|7834|359.16|sits wake fluffily carefully stealthy accounts. furiously ironic requests x-ray fluffily alongside of the pending asymptotes. slyly silent packages use along the instructions. fu|
+22|5|1434|597.21|ix across the blithely express packages. carefully regular pinto beans boost across the special, pending d|
+23|4|2739|460.12|platelets against the furiously bold Tiresias dazzle quickly into the special, bold courts. silent, regular instructions wake blithely ironic multipliers. ideas|
+23|8|5739|103.13| theodolites need to nag blithely final notornis. slyly idle packages cajole after the furiously stealthy packages. slyly regular accounts use furiously. carefully final accounts affix |
+23|2|9898|233.94|l, express packages wake permanently. quickly even deposits sleep quickly slyly silent id|
+23|6|7035|51.75|xcuses; decoys wake after the pending packages. final instructions are furi|
+24|5|5180|905.41|heodolites above the ironic requests poach fluffily carefully unusual pinto beans. even packages acc|
+24|9|2227|511.20|, silent packages boost around the instructions. special requests sleep slyly against the slyly regular deposits. final, final accounts haggle fluffily among the final requests. regular |
+24|3|7182|582.03| the final, ironic asymptotes. regular requests nag instead of the carefully unusual asymptotes. furiously pending attainments among the slyly final packages boost after th|
+24|7|5318|62.15| careful requests cajole blithely realms. special asymptotes sleep. pinto beans sleep carefully furiously ironic packages. furiously |
+25|6|9029|832.74|fully fluffily regular frets. sometimes even requests after the requests wake slyly at the quickly ruthless requests. a|
+25|10|9062|928.96|he foxes. final, final accounts sleep. boldly ironic excuses thrash quick|
+25|4|9946|694.35|ld, ironic requests. furiously special packages cajole furiously enticing instructions.|
+25|8|7340|746.59|dly final packages haggle blithely according to the pending packages. slyly regula|
+26|7|5020|683.96|es. fluffily express deposits kindle slyly accounts. slyly ironic requests wake blithely bold ideas|
+26|1|6577|892.20|riously pending pinto beans. furiously express instructions detect slyly according to the b|
+26|5|3499|382.11|imes even pinto beans among the busily ironic accounts doubt blithely quickly final courts. furiously fluffy packages despite the carefully even plate|
+26|9|9702|821.89| behind the blithely regular courts impress after the silent sheaves. bravely final ideas haggle |
+27|8|2111|444.01|the even, ironic deposits. theodolites along the ironic, final dolphins cajole slyly quickly bold asymptotes. furiously regular theodolites integrate furiously furiously bold requests. carefully|
+27|2|9080|157.03|ole express, final requests. carefully regular packages lose about the regular pinto beans. blithely re|
+27|6|3407|151.34|ironic theodolites are by the furiously bold ideas. ironic requests shall have to sublate final packages. furiously quick foxes alongside of the express, special deposits was boldly according |
+27|10|4283|348.61|ound the final foxes detect furiously across the even warhorses. quickly t|
+28|9|6643|204.86|y ironic deposits above the slyly final deposits sleep furiously above the final deposits. quickly even i|
+28|3|2452|744.57|ully regular theodolites haggle about the blithely pending packages. carefully ironic sentiments use quickly around the blithely silent requests. slyly ironic frays bo|
+28|7|302|690.30|uickly unusual requests alongside of the final courts integrate slyly |
+28|1|9988|666.53|beans haggle carefully around the slyly ironic acco|
+29|10|3506|799.27|leep fluffily according to the quietly regular requests: accounts integrate carefully bold foxes. carefully silent|
+29|4|8106|981.33|the ironic, bold asymptotes! blithely regular packages hang furiously above the dependencies. blithely permanent dependencies are furiously furiously ironic acco|
+29|8|9193|734.44|ly unusual packages. foxes cajole. theodolites nag|
+29|2|6252|186.21|thely carefully even packages. even, final packages cajole after the quickly bold accounts. fluffily quick accounts in place of the theodolites doze slyly f|
+30|1|4767|989.05|ts. slyly final pinto beans cajole ironic accounts. blithely final accounts use among the request|
+30|5|535|743.26|sual instructions wake carefully blithely even hockey playe|
+30|9|7756|568.86| special foxes across the dependencies cajole quickly against the slyly express packages! furiously unusual pinto beans boost blithely ironic Tir|
+30|3|7945|583.84| sleep. bold, regular deposits hang doggedly furiously bold requests. slyly bold excuses detect busily above the even gifts. blithely express courts are carefully. blithely final packages until th|
+31|2|9685|620.84|he blithely regular ideas. blithely unusual requests haggle fluffily. platelets|
+31|7|1951|120.99|refully regular pinto beans. ironic requests integrate furiously since the quickly ruthless platelets. quickly ironic attainments ha|
+31|2|1402|761.64|r platelets nag blithely regular deposits. ironic, bold requests |
+31|7|137|849.11|blithely ironic accounts. slyly ironic asymptotes sleep ironic, even accounts. regular accounts thrash quickly|
+32|3|2203|406.03|es? slyly enticing dugouts haggle carefully. regular packages alongside of the asymptotes are carefull|
+32|8|467|109.34|ainst the unusual braids nod fluffily packages. regular packages nod among the slyly express|
+32|3|7975|747.14|final foxes boost furiously pending packages. quickly regular depths promise blithely accoun|
+32|8|7938|856.09|s integrate according to the even dependencies. carefully regular reque|
+33|4|4028|891.46|, pending requests affix slyly. slyly ironic deposits wake accounts. express accounts sleep slowly. ironic, express accounts run carefully fluffily final dependencies. furiously unusual ideas|
+33|9|4410|929.05| packages sleep carefully. slyly final instructions boost. slyly even requests among the carefully pending platelets wake along the final accounts. quickly expre|
+33|4|1287|310.76|dolites above the slyly express deposits try to haggle blithely special gifts. blithely ironic reque|
+33|9|6006|327.19|ly. ironic dependencies haggle carefully silent instructions. furiously ironic dolphins are fluffily furiously even theo|
+34|5|9934|848.75|ven instructions besides the gifts are furiously among the slyly regular packages! instructions use carefully. even requests sleep quickl|
+34|10|4749|265.31|ckly regular theodolites eat above the bravely regular courts. ironic requests wake slyly.|
+34|5|5459|824.69|ong the slyly silent requests. express, even requests haggle slyly|
+34|10|5884|609.69|ully final tithes. slyly ironic deposits hang furiously about the regular, regular deposits|
+35|6|2500|451.58|nic packages boost carefully carefully even theodolites. blithely fina|
+35|1|8875|537.72|ully regular deposits: special accounts use. slyly final deposits wake slyly unusual, special ideas. asymptotes |
+35|6|596|669.19|slyly against the daring, pending accounts. fluffily special pinto beans integrate slyly after the carefully unusual packages. slyly bold accounts besides|
+35|1|2025|411.17|s cajole fluffily final deposits. furiously express packages after the blithely special realms boost evenly even requests. slow requests use above the unusual accoun|
+36|7|3907|630.91|al deposits detect fluffily fluffily unusual sauternes. carefully regular requests against the car|
+36|2|174|434.47|permanently express instructions. unusual accounts nag toward the accou|
+36|7|2625|569.91|ctions. pending requests are fluffily across the furiously regular notornis. unusu|
+36|2|8209|289.15|arefully regular requests cajole. special, express foxes sleep slowly. quickly unusual in|
+37|8|7171|824.96|usly into the slyly final requests. ironic accounts are furiously furiously ironic i|
+37|3|5542|126.59|ven deposits. ironic foxes cajole. slyly final deposits are furiously after the furiously even packages. slyly ironic platelets toward the slyl|
+37|8|7113|15.72|re bravely along the furiously express requests. blithely special asymptotes are quickly. fluffily regular packages alo|
+37|3|1449|745.64|y after the ironic accounts. blithely final instructions affix blithely. bold packages sleep carefully regular instructions. regular packages affix carefully. stealthy fo|
+38|9|1226|570.11| slyly even pinto beans. blithely special requests nag slyly about the ironic packages. |
+38|4|4237|662.75|lar warhorses cajole evenly against the attainments. requests cajole furiously furiously express requests. carefully regular platelets use fluffily after the silent, unusual ideas: bl|
+38|9|1135|160.70|express accounts haggle. carefully even pinto beans according to the slyly final foxes nag slyly about the enticingly express dol|
+38|4|3516|847.09|nal accounts. furiously pending hockey players solve slyly after the furiously final dependencies. deposits are blithely. carefully regular packages unwind busily at the deposits. fluffily |
+39|10|3633|463.10|kages are slyly above the slyly pending pinto beans. bold, ironic pinto beans sleep against the blithely regular requests. fluffily even pinto beans use. regular theodolites haggle against the quic|
+39|5|3682|300.43|ng requests are according to the packages. regular packages boost quickly. express Tiresias sleep silently across the even, regular ideas! blithely iro|
+39|10|5475|532.26| beans cajole carefully carefully express requests. instructions sleep furiously bold deposits. furiously regular depos|
+39|5|6259|737.86|y. special, even asymptotes cajole carefully ironic accounts. regular, final pinto beans cajole quickly. regular requests use warhorses. special, special accounts hinder boldly across the|
+40|1|7690|776.13|lets use fluffily carefully final deposits. blithely ironic instructions sublate against the furiously final ideas; slyly bold courts x-ray silent foxes. regular foxes wake blithely. slyl|
+40|6|1704|565.82|riously furiously silent asymptotes. final deposits cajole blithely ironic requests. furiously special pains into the blithely final instru|
+40|1|4521|374.71|ptotes haggle. slyly even requests nag fluffily silent packages. blith|
+40|6|6617|196.64|he slyly unusual epitaphs? ironic deposits at the furiously unusual instructions thrash blithely requests. requests are carefully blithely pending waters.|
+41|2|9040|488.55|ss the dinos wake along the blithely regular theodolites. foxes cajole quickly ironic, final foxes. blithely ironic packages haggle against |
+41|8|5946|391.81| slyly slyly regular requests. final deposits sleep fluffily. blithely bold instructions detect carefully. blithely pending requests are furiously ironically final ideas. regul|
+41|4|1550|916.55| the blithely final ideas. furiously regular asymptotes could cajole furious|
+41|10|560|37.59|special pinto beans against the unusual accounts cajole slyly final foxes. close, ironic|
+42|3|2893|716.81|requests nag. furiously brave packages boost at the furiously even waters. slyly pending ideas nag carefully caref|
+42|9|2927|709.06|g dugouts. carefully careful ideas are fluffily. carefully final pinto beans snooze. ironic deposits wake evenly along |
+42|5|3500|200.00|against the ironic, ironic forges. slyly final deposits wake blithely. ironic courts sleep furiously ab|
+42|1|3662|29.46|es sleep slyly among the slyly final requests. bold theodolites use silently against the final foxes. carefully pending requests use furiously. dogged, unusual asymptotes use |
+43|4|3211|805.78|gular accounts. bold theodolites nag slyly. quickly express excuses use blithely. blithely even ideas boost fluffily! blithely unusual ideas detect bli|
+43|10|6770|493.19|ing to the quickly even theodolites. quickly bold excuses haggle. sometimes unusua|
+43|6|9506|493.65|riously! slyly ironic sauternes affix. ironic theodolites sleep furiously about the express packages. slyly ironic deposits are blithely against the regular package|
+43|2|3232|307.12|counts: express, final platelets use slyly bold ideas. ironic theodolites about the blithely s|
+44|5|486|164.22| final notornis throughout the unusual pinto beans are about the special accounts. bold packages sleep fluffily above the|
+44|1|5310|114.37|quests. quickly unusual requests against the carefully final somas detect slyly bold a|
+44|7|3534|383.01|r the pending pinto beans! requests wake furiously after the special deposits. silent deposits mold quickly along the express, special |
+44|3|4798|833.15| run. ironic, special dolphins according to the even, ironic deposits haggle carefully alongside of the carefully regular excuses. regular frays haggle carefully ironic dependenc|
+45|6|1685|919.63|he doggedly final accounts; carefully regular packages cajole idly regular idea|
+45|2|5202|877.29|ngage blithely after the final requests. bold accounts sleep blithely blithely express dependencies. pinto beans through the carefully regular hockey players wake|
+45|8|5669|532.70|es play carefully doggedly unusual requests. bold grouches against the furiously ironic dugouts sleep furiously qu|
+45|4|1872|155.32| ironic, even pinto beans. bold theodolites haggle after the furiously ironic accounts. slyly bold courts|
+46|7|4171|244.65|lly quiet instructions. furiously express requests among the final ideas cajole carefully bold waters. furiously regular pac|
+46|3|8518|106.80|e unusual instructions shall have to detect slyly blithely ironic foxes. bold requests impress silent foxes. ironic, quiet realms haggle quickly pending, express pinto be|
+46|9|7225|14.78|ously about the fluffily pending accounts. fluffily even dugouts are quickly slyly express platelets; quickly bold pearls sleep slyly even instructions. furiously ironic packages poach quic|
+46|5|1381|985.88|ending platelets are carefully regular accounts. fluffily even accounts against the dependencies nag carefully final, |
+47|8|6989|292.52|even ideas. blithely final requests boost blithely. final, ironic instruct|
+47|4|4458|539.47|; finally enticing theodolites cajole enticing, silent warhorses! slyly bold pains c|
+47|10|2896|74.54|grate final asymptotes. pending requests kindle carefully final frets. ironic deposits above the slyly e|
+47|6|5873|296.63|after the regular dependencies. final, bold pains sleep quickly pend|
+48|9|5052|611.16|posits are blithely blithely final foxes. blithely even deposits haggle fluffily express requests. furiously final theodolites use sl|
+48|5|9451|191.36|ckages cajole never even, special foxes. regular dependencies wake after the blithely ironic instructions. thinly ironic reque|
+48|1|5564|668.19|al pinto beans. furiously final frays use slyly according to the ironic theodolites. regular ideas cajole furiously after the slyly even deposits. |
+48|7|1719|606.16|forges lose. packages cajole regular, bold accounts. never ironic accounts may promise about the permanently bold deposits. always express requests cajole fluffily regular c|
+49|10|9056|35.11| bold deposits? final, bold pinto beans are furiously slyly regular packages. sly|
+49|6|6646|908.15|ts sleep across the fluffily final deposits. carefully express accounts around the regular, express excuses x-ray inside the ironic theodolites. expre|
+49|2|5336|713.25|ld accounts. furiously blithe waters use furiously blithely idle dependencies. pending deposits along the permanently re|
+49|8|597|812.62|n foxes snooze furiously. courts integrate never. carefully unusual requests are carefully. quickly ironic deposits ha|
+50|1|1832|565.54|liers above the dolphins dazzle across the regular foxes. furiously regular packages haggle furiously blithely ironic grouches. ironic, even accounts haggle pending, furious instruction|
+50|7|43|690.87|aggle daringly along the close, express deposits. final requests snooze carefully carefully bold deposits. carefully unusual ideas doze furiously after the furious|
+50|3|6160|301.06|arefully ironic requests use. furiously pending waters play carefully carefully regular platelets. sly requests cajole furiously slyly regular pinto beans. bold packages boost fluffily. furiously i|
+50|9|2104|107.17|t blithely unusual theodolites. quickly final accounts affix fluffily regular requests. c|
+51|2|837|310.74|ly dogged, regular dependencies. express, even packages are |
+51|9|7318|85.03|al foxes. carefully ironic accounts detect carefully-- slyly even accounts use. furiously final platelets shall haggle sometimes after the blithely regu|
+51|6|138|728.95|requests according to the carefully unusual deposits promise slyly ironic packages. slyly ironic dependencies are accordin|
+51|3|8062|901.04|le ruthlessly furiously slow requests. fluffily slow depende|
+52|3|6533|54.92|efully. slyly special deposits haggle along the quick deposits. slyly pending requests use quickly packages. final, final dolphins doubt according to the quickly unusual excuses|
+52|10|1937|210.44|s. never even asymptotes nag carefully! regularly unusual foxes along the unusual requests haggle accounts. fluffily express pinto |
+52|7|4084|628.53| deposits wake slyly pending asymptotes. ironic asymptotes haggle. blithely ironic requests are qui|
+52|4|5524|424.93|cial, ironic packages. even dolphins boost. slyly final deposits integrate. final sheaves along the silent excuses use at the slyly close foxes; bold accounts are finally even packages. ironi|
+53|4|6443|192.78|carefully ironic accounts. blithely bold deposits detect furiously against the flu|
+53|1|5319|563.44|ly. fluffily final pearls boost carefully. special sauternes nod furiously even instructions. carefully regular dependencies across the slyly regular deposits|
+53|8|8200|388.08|fully requests. furiously final accounts cajole express, regular pearls. special deposits wake fluffily express accounts. quic|
+53|5|6929|224.83|xes. carefully ruthless asymptotes impress slyly. fluffily final deposits sleep against the ideas. slyly final packages wake. pending, express packages sleep quickly.|
+54|5|2515|686.51|ly along the packages. blithely close pinto beans are blithely alongside of the unusual packages. carefully even platelets boost alongside of the even foxes. ironic de|
+54|2|7079|798.98|he carefully unusual packages wake according to the ironic dolphins. permanently regular sheaves nag quickly. regular, ironic|
+54|9|2386|23.78|kly ironic foxes. final instructions hinder doggedly. carefull|
+54|6|536|259.24| furiously along the fluffily regular requests. carefully unusual accounts use fluffily final platelets. pending deposits integrate furiou|
+55|6|7874|611.04|ly special packages. furiously even warhorses integrate. silen|
+55|3|8460|236.27|round the special, bold asymptotes cajole alongside of the instructions. qui|
+55|10|8278|134.62|gedly silent pinto beans! furiously regular sentiments was furiously across the silent pinto beans. pending warthogs along the slyly |
+55|7|1289|130.33|ut the blithely final requests. requests nag blithely. |
+56|7|241|855.39|nto beans. finally regular sauternes are. carefully bold deposits according to the blithely express requests wake carefully ironic excuses? furiously final deposit|
+56|4|9104|54.79|tructions above the blithely pending foxes cajole blithely furiously even sentiments. special, exp|
+56|1|1330|52.29|xpress instructions haggle furiously regular deposits. quickly unusual packages sleep furiously final pinto|
+56|8|5799|926.25|ades grow around the dependencies. carefully special ideas cajole furiously across the blithely express requests. unusual tithes are caref|
+57|8|2972|123.11| asymptotes use carefully furiously final deposits. quickly regular deposits are furiously slyly ironic requests. blithely even excuses haggle: blithely special ideas|
+57|5|4721|411.08|instructions. quickly unusual deposits about the furiously special ideas believe among the furiously bold theodolites. unusual, even ideas nag: slow, special theodolites hagg|
+57|2|3788|211.66|ly according to the ironic requests-- slyly final accounts print carefully depths? pending, unusual accounts solve |
+57|9|4583|137.68|ts. blithely bold theodolites can boost carefully carefully even instr|
+58|9|4328|542.52|ven deposits wake requests. quickly bold platelets sleep furiously after the ironic requests. even accounts haggle quickly bold |
+58|6|4307|448.31|quickly carefully ironic foxes. bold platelets nag furiously regular packages. slyly specia|
+58|3|4136|512.24|packages cajole slyly quickly pending depths. special, bold realms cajole slyly. slyly ir|
+58|10|9689|25.09|long the unusual, express asymptotes. ironic ideas boost bold, special deposits? ironic foxes among the fin|
+59|10|8374|357.22|c decoys. carefully even pinto beans wake slyly alongside of the express accounts. regular grouches haggle.|
+59|7|4226|80.98|lar packages. regular depths use slyly after the fluffily regular packages; theodolites around the furiously ironic asy|
+59|4|99|598.55|he special pinto beans. fluffily even accounts cajole. fluffily regular foxes haggle among the|
+59|1|8184|45.50|ependencies. ironic dependencies wake carefully according to the blithely bold packages. quickly unusual ideas about th|
+60|1|6642|800.72| blithely. slyly final realms alongside of the excuses use quickly blithely bold foxes. final theodolites are slyly after the slyly regular excuses. never thin foxes about |
+60|8|5017|314.81| even pinto beans wake carefully. quickly regular deposits hinder along the furiously regular pack|
+60|5|148|504.10|s use fluffily. furiously regular deposits boost furiously against the even instructions. blithely final platelets wake. carefully pending asymptotes sleep blithely. regular, s|
+60|2|5792|92.64|s the carefully pending deposits. slyly regular pinto beans against the furiously regular grouches lose carefully around the enticingly final ideas. furiously express packages cajole bold pa|
+61|2|1540|858.64| could have to use upon the packages. fluffily special packages integrate slyly final theodolites. pending warhorses wake quickly after the blithely final fo|
+61|10|9170|771.26|ly. pinto beans sleep blithely about the patterns. slyly final accounts wake according to the furiously bold requests. slyly regular packages wake according to the ironic packages. requests acros|
+61|8|4762|633.74|final theodolites haggle. fluffily express ideas about the silent theodolites cajole ideas; fluffily special instructions are accordin|
+61|6|7312|153.74|gly final instructions. pending theodolites will wake furiously. slyly bold instructions run. furiously special foxes cajole f|
+62|3|1780|692.42|s around the even ideas cajole furiously somas. silent asym|
+62|1|5896|348.82| final accounts. furious deposits wake slyly. idly regular packages haggle blithely pending grouches. ironic accounts boost blithely. carefully express pa|
+62|9|9127|620.08|totes. unusual requests after the unusual accounts sleep fluffily bold notornis. slowly careful requests use according to the final ideas. pinto beans sleep. foxes are furiously furiously pe|
+62|7|9542|255.78|lly express requests haggle carefully. idle, pending pinto beans are furiously regular excuses. quickly sly attainments are furiously; even accounts are slyly quickl|
+63|4|1804|498.84|leep bravely. final accounts nag. forges sleep against the slyly ironic pa|
+63|2|1998|509.16|yly express theodolites. slyly bold ideas sleep furiously accordi|
+63|10|6839|274.15| among the carefully ironic accounts. carefully even accounts against the regular, final deposits detec|
+63|8|6325|463.69|arly express accounts. express, unusual escapades haggle. special packages must wake. express, regular requests sleep furiously ironic packages|
+64|5|5567|228.61|y even instructions. unusual requests serve slyly. special foxes sleep quickly. fluffily ir|
+64|3|4542|398.92|. quickly final ideas cajole carefully among the blithely silent requests. sometimes ironic accounts nag furiously against the pending instructions. f|
+64|1|9110|602.65| ironic accounts are carefully carefully final accounts. slyly ironic packa|
+64|9|2064|25.77| quickly regular ideas. carefully final requests snooze carefully regular, regular instructions. stealthily final pi|
+65|6|2918|846.26|inal, even foxes cajole. furiously final dolphins hang quickly ironic foxes. furiously special packages alongside of the bold foxes solve above the carefully final instructio|
+65|4|1779|393.63|ully after the quickly regular ideas. ironic, final multipliers above the carefully bold deposits breach slyly furiously express deposits. unusual accounts haggle carefully idea|
+65|2|2054|503.10|e express excuses. ironic, even accounts across the reg|
+65|10|2188|288.73|lent requests nag quickly. blithely silent platelets haggle ironic accounts. slyly bold instructions boost carefully final accounts. carefully even dependencies must nag blithely; qui|
+66|7|3077|809.13|nod carefully besides the furiously final theodolites. slyly final requests haggle. furiously silent excuses detect quickly. ironic deposits detect above the furiously final |
+66|5|1076|785.75|its across the blithely regular theodolites wake furiously among the furiously regular accounts. pains are slyly care|
+66|3|2568|447.08|ously even accounts boost slyly daring requests. even, regular realms kindle blithely. unusual, ironic ins|
+66|1|296|797.27|s nag enticingly outside the furiously final foxes. final accounts haggle fluffily accord|
+67|8|9923|306.37|ly according to the quickly ironic requests. express instructions after the slyly even instructions x-ray blith|
+67|6|7908|546.75|furiously express dolphins integrate carefully regular notor|
+67|4|3368|625.62|le slyly regular requests: regular platelets wake quickly across the quickly regular accounts. reg|
+67|2|5826|397.34|en, ironic deposits affix quickly unusual requests. busily ironic accounts are finally never even sauternes. ironic depos|
+68|9|3444|31.37|es impress furiously pending packages. always silent instructions above the fluffily bold packages haggle slyly blit|
+68|7|6762|5.16|lithely. carefully even grouches along the bold deposits might sleep slyly requests. blithel|
+68|5|8300|80.86|nooze according to the furiously even ideas. blithely regular accounts wake blithely. furiously regular Tiresias cajole regular deposits. regular theodolites eat alongside of the|
+68|3|5399|683.59|. finally final pinto beans play carefully unusual requests. never pending accounts are. regular, final theodolites wake furiously excuses. special request|
+69|10|6197|694.24|eep across the packages. regular, final foxes boost fluffily regular pinto beans. packages sleep along the final requests. bold, unusual packages cajo|
+69|8|8235|846.49|nt fluffily. carefully ironic instructions wake. blithely express foxes cajole slyly. unusual requests sleep quickly. final packages affix slyly according to the spec|
+69|6|9294|386.96|ar packages. blithely regular dependencies are dolphins. slyly ironic excuses nag quickly pending, regular ideas. furiously special sheaves haggle. close, regular pinto beans about the slyly bold|
+69|4|7017|344.28|heodolites. unusual, regular requests boost slyly pending deposits. slyly daring instruct|
+70|1|4536|348.27|ructions. blithely final packages cajole carefully after the express, even requests. furiously final theodolites cajole |
+70|9|8063|452.80|y regular deposits nag about the carefully regular instructions; furiously express accounts along the final, express instruct|
+70|7|2990|940.81|s deposits. unusual foxes are carefully according to the carefully even deposits. carefully ironic foxes cajole fluffily against the carefully pending deposits. slyly special depo|
+70|5|9074|182.58|ions after the fluffily regular foxes wake above the furiously regular requests: slyly regular deposits wake slyly daringly even Tiresias. express, express deposits are. always unusual pa|
+71|2|508|842.21|es cajole carefully around the furiously pending instructions. |
+71|1|8329|239.57|ins sleep carefully slyly express accounts! quickly even accounts boost carefully about the carefully regular excuses. dogged, even dolphins against the sometimes ironic packages believe bl|
+71|10|6768|744.67|ructions. daring requests solve carefully about the furiously pending pinto|
+71|9|5179|329.13|usly at the packages. blithely regular deposits haggle regular packages. quickly special theodolites at the blithely ironic instructions wake|
+72|3|9855|497.26|tithes. quickly pending foxes haggle enticingly according to the accounts. accounts detect slyly: final packages wake. fina|
+72|2|9346|41.04| pending instructions before the even, silent dep|
+72|1|2654|762.61|nusual packages: blithely bold Tiresias sleep furiously. slyly brave accounts according to the final, |
+72|10|4526|154.47|use across the never ironic packages. express, regular accounts above the pending, fluffy deposits are carefully across the slyly even pinto be|
+73|4|9873|947.99|tes use pending packages. final foxes wake final, unusual packages. blithely blithe ideas haggle sometimes slyly express accounts. express instructions nag furiously quickly|
+73|3|7729|920.66|ecial accounts sleep according to the slyly sly accounts. slyly express instructions nag. accounts cajole furiously quickly even foxes. furiously regular requests wake. carefully even frets haggle |
+73|2|5327|108.96| beans are furiously between the regular ideas! unusual pinto beans use. furiously silent requests against the carefully even somas wake care|
+73|1|3928|309.57|longside of the blithely final ideas. carefully ironic courts sleep along the enticingly pending requests. fluffily regular accounts use fluffily bold ideas. slyly ironic packa|
+74|5|3128|345.92|ic theodolites. express deposits haggle blithely pending packages. quickly express foxes could are slyly. deposits sleep deposits. final dependencies sleep ab|
+74|4|2479|930.97|o beans sleep dependencies. regular accounts use blithely asymptotes. u|
+74|3|9473|496.36| haggle carefully alongside of the regular requests. slyly regular accounts belie|
+74|2|6234|849.66| slyly regular foxes. silent accounts integrate. even deposits are quick|
+75|6|7086|624.39|sits are furiously fluffily even courts. furiously pending requests are blithely. pending, regular accounts play carefully slyly unusual platelets. blithely final requests against the ru|
+75|5|6308|759.36|refully ironic dependencies. pinto beans use according to the packages. regular platelets wake around the blithely p|
+75|4|9080|433.59|sits. permanent packages breach. carefully final waters wake. bold, pending foxes haggle furiously evenly express instructions. even deposits about the final|
+75|3|5439|884.01|ding excuses snooze special accounts. tithes alongside of the regular dep|
+76|7|6754|494.83|gular accounts solve. ironic deposits sleep slyly even packages. slyly pending accounts detect slyly express accounts. ironic forges can play furiously carefully express fox|
+76|6|2009|108.97|n packages. blithely even accounts sleep carefully furiously ironic accounts. carefully express requests|
+76|5|6371|552.38|ts use against the quickly ironic ideas. quickly even deposits are carefully a|
+76|4|7986|252.03| packages across the furiously ironic platelets cajole across the regular, ironic accounts. carefully enticing accounts among the blithely regular instructions detect regular pinto be|
+77|8|552|254.92|e after the carefully pending packages. carefully even dependencies cajole pending |
+77|7|8170|875.83|xcuses. blithely even foxes use fluffily. blithely even requests use. slyl|
+77|6|8541|936.13|e slyly express instructions haggle about the sometimes regula|
+77|5|1713|402.14|the even ideas kindle after the requests. regular theodolites cajole carefully about the blithely final ideas. carefully even dependencies at the flu|
+78|9|9915|729.94|around the special excuses. furiously even deposits serve boldly according to the platelets. carefully express accounts at the blithely unusual pinto beans sleep furiously against the u|
+78|8|7246|577.23|regular dependencies cajole doggedly ironic accounts. bold theodolites doze about the accounts. quickly final requests boost slyly final asymptotes. carefully final dolphins ha|
+78|7|1801|434.34|nts kindle furiously according to the even packages. blithely ironic platelets are slyly silent foxes. final, final packages would sleep. pinto beans a|
+78|6|9599|382.82| carefully special theodolites cajole among the quickly even asymptotes. foxes wake blithely across the carefully |
+79|10|4248|765.34|nusual, express asymptotes wake furiously. ironic pinto beans detect above the carefully express theodolites: even, dogged instructions nag. spe|
+79|9|465|28.33|uriously special frays cajole across the finally ironic pinto beans. ironic accounts sleep blithely. fluffily silent accounts are slyly at the slyly unusual ideas. even deposits nag slyly |
+79|8|3309|880.23|tect final, thin accounts? furiously ironic accounts boost regular deposits. carefully ironic attainments sleep. furiously special ins|
+79|7|8627|891.18|r dolphins grow blithely against the slyly ironic packages. deposits about the regular, ironic decoys are slyly around the carefully regular packages. slyly pending excuses sle|
+80|1|8893|127.65|ld accounts detect carefully. carefully bold courts along the regular deposits could have to affix ca|
+80|10|2243|775.79|endencies. bold, regular pinto beans wake furiously above|
+80|9|5385|945.72|cial asymptotes believe after the blithely unusual deposits. furiously silent pinto beans cajole quickly inside the slyly even deposits. regular, f|
+80|8|4034|797.05|ptotes cajole carefully. express ideas cajole carefully even somas. final pinto beans print fluffily across the |
+81|2|1605|550.29|es haggle blithely fluffily final requests. furiously regular foxes use. furiously unusual requests outside the furiously regular requests|
+81|2|5923|220.23|the final, quick accounts are blithely above the s|
+81|2|2942|409.73|accounts boost. fluffily unusual requests cajole fluffily slyly ironic requests. foxes cajole quick|
+81|2|58|492.19| instructions boost furiously across the foxes-- final depo|
+82|3|7793|697.31|he accounts cajole quickly after the even patterns. ironic platelets sublate regular, even asymptotes. quick courts affix according to|
+82|3|7698|585.86|pinto beans. slyly express excuses haggle. blithely even pinto beans about the quick inst|
+82|3|8268|604.25|e after the carefully even theodolites. regular, pending accounts boost. quickly final asymptotes haggle slyly. requests use final, bold pinto beans. bold, ruthle|
+82|3|5532|900.07| slyly? fluffily special dependencies haggle among the slyly special requests. regular, bold packages after the blithely ironic packages are slyly ironic packages. slyly final deposits w|
+83|4|3010|745.51|l foxes along the bold, regular packages integrate carefully express courts! final excuses sleep carefully ironic|
+83|4|8200|399.64|y final platelets are carefully carefully special platelets. carefully ironic requests wake blithely alongside of the slyly even accounts. bold, regular requests sleep |
+83|4|5974|657.22| even packages boost furiously. slyly regular gifts above the accounts are quickly express packages. slyly pending deposits besides the express, even asymptotes haggle after the ironic ins|
+83|4|3890|24.73|deposits. carefully even dependencies across the dependencies haggl|
+84|5|5711|233.61|arefully final platelets cajole blithely; quickly final accounts use furiously. furiously reg|
+84|5|208|469.80|carefully express dolphins nag about the slyly bold requests. slyly even packages wake among the furiously special attainments.|
+84|5|2909|969.44|silent requests cajole slowly bold ideas. special, special deposits according to the always silent packages are against the furiously silent packages. even, blithe accounts sleep slyly across |
+84|5|903|707.77|gly regular dependencies boost. slyly even accounts sleep. furiously final hockey players wake carefully with the reg|
+85|6|2628|608.77|xes wake furiously after the carefully even platelets. blithe theodolites are furi|
+85|6|118|917.83| against the even deposits. furiously bold ideas along the furious requ|
+85|6|2074|491.20|encies-- slyly regular requests about the quiet accounts detect quickly at the |
+85|6|8289|73.81|s cajole slyly along the slyly special accounts. regular, special deposits wake. furiously special foxes boost. blithely even packa|
+86|7|806|65.98|ackages. blithely pending accounts are slyly furiously pending theodolites. furiously eve|
+86|7|2773|250.04|ding accounts. slyly special requests will have to affix carefully along the furiously unusual packages. regular theodol|
+86|7|5546|816.53|s. slyly final requests wake. furious deposits must wake blithely among the blithely ironic instructions. special hockey players try to are bli|
+86|7|1418|332.65|press theodolites sleep carefully about the blithely unusual requests. quickly final deposits breach slyly |
+87|8|5679|688.33|t the carefully regular asymptotes. blithely stealthy pinto beans within the furiously expres|
+87|8|1272|435.42|ronic foxes sleep along the special foxes. final ideas wake quickly about the carefully special theodolites. blithely ironic packages are blithely. regular, regular pint|
+87|8|9041|617.20|furiously final deposits. furiously special dependencies solve across the regular, special ideas. carefully silent requests haggle furiously after the special, specia|
+87|8|1892|868.60|arhorses are. unusual requests use blithely furiously final ideas. final requests sleep theodoli|
+88|9|6116|334.58|ect furiously around the regular deposits. special, final platelets boost furiously. blithely unusu|
+88|9|395|71.50| the regular accounts-- furiously even accounts use quickly after the regular, regular deposits. furiously e|
+88|9|9979|81.82|f the regular, regular requests believe fluffily along the final, quiet decoys. furiously even accounts cajole. carefully express requests wake quickly among the ideas. quickly silent |
+88|9|276|821.43|gular pinto beans. slyly pending excuses breach blithely express accounts. thin deposits sleep slyly around the even accounts; fluffily busy patterns kindle. slyly final deposits along the |
+89|10|3430|744.87| integrate slyly dolphins. bold, final frets use beside the carefully even accounts. slyly close dependencies sleep quickly carefully final pinto beans. foxes promi|
+89|10|8599|776.53|ress packages use furiously. furiously regular packages thrash blithely about the slyly pe|
+89|10|7876|417.61|nstructions: furiously even requests are quietly unusual accounts. regular requests are after the blithely regular deposits. sl|
+89|10|924|920.02|ickly unusual asymptotes after the slyly unusual accounts are carefully doggedly ironic accounts. even, final accounts use furiousl|
+90|1|8037|409.38|eas. unusual, pending packages boost quietly final accounts. slyly final packages serve. slyly even instructions sleep carefully. quickly even foxes wake quickly. |
+90|1|9683|498.43| accounts! fluffily regular deposits x-ray about the unusual, final packages. furiously final deposits alongside of the caref|
+90|1|7849|666.13|carefully ironic accounts are around the slyly bold asymptotes. carefully regular packages use furiously. ironic platelets affix carefully final accounts-- fluffily final pinto beans across the fina|
+90|1|7629|50.84|onic requests wake fluffily unusual packages. furiously even frays after the daringly pending requests wake furiously alongside of the bold requests. fluffily ironic ideas nag. ironic,|
+91|2|7986|528.64|luffily final instructions. furiously unusual foxes haggle |
+91|3|3257|906.20|ackages cajole slyly. blithely bold deposits cajole. blithely |
+91|4|483|823.21|n: slyly ironic foxes nag blithely according to the furiously bold foxes. regular, regular accounts a|
+91|5|1265|703.41| quickly silent deposits use attainments. final requests along the carefully ironic accounts wake blithely about the carefully ironic excuses. furiously bold excuses wake final, final ex|
+92|3|9337|224.01| requests are slyly along the deposits. fluffy pains alongside of the deposits |
+92|4|2246|985.03|jole enticingly regular asymptotes. carefully unusual pinto beans nag carefully ironic ideas. quickly un|
+92|5|3199|91.63|ake carefully: carefully ironic requests sleep careful|
+92|6|1044|854.89|l instructions are fluffily silently regular accounts. quickly final dolphins w|
+93|4|3008|615.98|sits promise blithely fluffily special decoys. slyly regular packages along the slyly final deposits wake accord|
+93|5|5275|376.47|ounts boost fluffily along the thinly regular realms. busily regular a|
+93|6|3869|868.81|ly among the furiously silent accounts. closely regular pinto beans nag slyly! slyly e|
+93|7|7188|805.90|y furiously bold pinto beans. express asymptotes was quickly. carefully final accounts affix slyly! platelets according to the ca|
+94|5|5433|365.56| even excuses wake carefully. quickly unusual requests wake accounts. regularly pending packages are regular |
+94|6|7784|358.08|ironic packages wake slyly carefully regular accounts. quickly regular warhorses against the blithely ironic packages haggle doggedly sly|
+94|7|7232|478.94|y regular requests. carefully final asymptotes haggle carefully against the slyly unusual requests: blithely brave grouches are fu|
+94|8|3261|824.08|quests. enticingly final accounts sleep fluffily. quickly express asymptotes around th|
+95|6|5186|291.03|ites across the blithely pending theodolites do affix across the unusual, bold Tiresias. bold packages|
+95|7|6552|456.36|tes; final, final accounts boost blithely ironic pinto beans. blithely ironic deposits cajole above the quickly pending requests? i|
+95|8|367|987.22| express requests detect furiously. requests cajole carefully|
+95|9|7379|973.74| above the furiously unusual deposits haggle ironic ideas. express, even packages haggle slyly slyly special asymp|
+96|7|5739|202.06|re. slyly regular theodolites breach slyly even dinos. fluffily regular asymptotes haggle slyly. fluffily bold courts affix furiously. regular requests |
+96|8|4942|571.30|e carefully. bold packages sleep against the furiously express requests. express foxes above the dependencies use quickly according to the slyly expres|
+96|9|9985|672.29|ecial instructions-- blithely silent theodolites play. even, silent accounts sleep. blithely silent requests haggle final, f|
+96|10|7250|587.08|efully ironic foxes. regular, final pinto beans boost above the express a|
+97|8|6371|129.77|fluffily unusual accounts. slyly regular theodolites integrate furiou|
+97|9|2390|458.34| carefully unusual pinto beans; even deposits detect furiously|
+97|10|2618|239.34|al theodolites are daringly requests. warhorses sleep blithely requests. special accounts cajole slyly deposits. a|
+97|1|4580|761.41| beans. carefully final deposits alongside of the carefully final requests haggle idly blithely ironic accounts. foxes cajole slyly against the ironic, special packages. furiously brave excuses boo|
+98|9|9486|908.21|usly final deposits mold furiously above the even deposits. carefully ironic packages across the quickly regular dolphins are slyly according to the slyly even|
+98|10|8550|657.16| sleep carefully. bravely bold somas may sleep pendin|
+98|1|3443|139.00|gular pinto beans maintain quickly fluffily regular deposits. express requests sleep. even requests after the regu|
+98|2|3759|811.55|iously. final, express packages are across the ironic dependencies. slyly thin ideas according to the even Tiresias detect furiou|
+99|10|8487|438.38|lphins affix ironic packages. blithely ironic requests nag fluffily after the slyly ironic foxes. bold dependencies boost furiously. special, |
+99|1|7567|496.93|es? permanently even excuses haggle quickly across the dependencies.|
+99|2|7970|365.83|ending accounts cajole furiously. requests promise care|
+99|3|2789|843.88|ending accounts. furiously sly packages above the carefully unusual dolphins sleep after the thinly even deposits. requests wake abo|
+100|1|7885|490.61| accounts nag slyly against the bold excuses. pearls according to the fluffily ironic accounts haggle fluffily along the quickly final platelets|
+100|2|2070|196.73| dolphins. bold deposits along the even theodolites sleep furiously about the final pinto beans. furiously unusual courts cajole about the carefully bold asymptotes. accounts integrate slyly entic|
+100|3|4994|929.57| deposits. accounts are slyly regular ideas. slyly special pinto beans upo|
+100|4|9688|22.00|uctions according to the carefully ironic deposits haggle carefully express ideas? packages across the quickly final requests c|
+101|2|5589|305.40|blithely above the fluffily pending ideas. quickly quick accounts nod ruthlessly above the carefully pending packages. slyly s|
+101|4|8263|218.71|fluffily final requests. carefully even packages wake quickly about the quickly ironic foxes. fluffily even requests hang quickly about the pending, final requests. sp|
+101|6|6324|786.53|olites sleep quickly. slyly ironic theodolites affix. furiously bold accounts integrate among the pinto beans. final ideas hang slyly along the quickly regular packages. instructions cajole.|
+101|8|55|612.09|beans against the carefully express ideas wake quickly along the quickly unusual requests. blithely regular accounts cajole fluffily. enticingly pending theodolites haggle furiously fluffily pendi|
+102|3|1384|876.75|s wake quickly. carefully express deposits wake. silent, regular requests sleep slyly after the furiously ironic deposits. slyly unusual accounts cajole|
+102|5|9137|332.71|telets are final, special deposits. silently ironic deposits wake. pending, eve|
+102|7|9156|618.00| the unusual, ironic pinto beans. theodolites above the foxes sleep slyly car|
+102|9|6942|231.02|tions haggle against the furiously ironic deposits. quickly final asymptotes haggle carefully. regular sentiments might cajole silent courts. blithely bold frays |
+103|4|5913|905.88|e across the theodolites. carefully pending escapades haggle after the ironic theodolites. furiously pending ac|
+103|6|7742|414.42|bout the bold, regular deposits; blithely even accounts are regular, even platelets-- carefully express accounts nag slyly pen|
+103|8|5164|361.48|furiously thin deposits haggle blithely. blithely regular deposits above the carefully regular accounts are slyly carefully regular packages. silent, unusual|
+103|10|429|605.20| theodolites cajole quickly above the asymptotes-- slyly special packages can haggle carefully blithely final instructions. unusual, regular ideas|
+104|5|2368|946.39|packages. final packages wake enticingly. furiously regular asymptotes are always about the carefully regular deposits. slyly regular platelets cajole carefully. final pinto beans must pro|
+104|7|6110|15.57|ending requests. carefully regular deposits use blithely. bold, ironic deposits wake slyly carefully specia|
+104|9|6269|213.89| ideas against the final accounts wake slyly regular notornis. final deposits haggle a|
+104|1|3369|729.38|ong the foxes. foxes sleep quickly? carefully regular accounts sleep. special foxes boost quickl|
+105|6|4602|27.75|lar pearls cajole never carefully even depths. blithely regular ideas are quickly. unusual asymptotes nod carefully carefully regula|
+105|8|269|158.62| unusual courts eat pending excuses. ironic, ironic requests use. bravely |
+105|10|8855|810.86|. slyly special depths sleep. bold packages engage furiously bold packages. fluff|
+105|2|8519|904.17|ding to the furiously careful ideas. dogged theodolites wake fluffily among the slyly bold ideas. blithely brave warthogs above the slyly even theodolit|
+106|7|8649|732.15| slyly ironic instructions are. bold, final accounts cajole slyly ironic pinto beans. fluffily ironic accounts around the quickly special requests use blith|
+106|9|3144|779.68| final deposits along the slyly express theodolites cajole blithely after the ironic pinto beans. furiousl|
+106|1|1411|310.40|al accounts impress. even instructions engage furiously final foxes. silently final deposits wake qui|
+106|3|2297|281.98|inal packages. pending foxes sleep bold hockey players. courts across the blithely regular packages sleep fl|
+107|8|7249|168.03|he fluffily even packages. slyly regular dependencies nag fluffily above the final, unusual foxes. final, pending foxes affix. furiously final deposits cajole quickly blithely|
+107|10|4029|91.31|integrate. requests maintain quickly. carefully regular ideas about the instructions sle|
+107|2|4667|372.94|uctions sleep doggedly final requests. express, final theodolites cajole fluffily furiously silent deposits. blithely regular requests cajole quickly regular instruction|
+107|4|7912|474.77|fluffily across the final, bold accounts. quickly regular deposits grow carefully deposits. regular requests haggle blithely. slyly special platelets boost furiously care|
+108|9|4149|558.85| of the quickly pending theodolites. fluffily unusual frays wake accounts. carefully even foxes wake slyly. carefully special pinto beans |
+108|1|4898|241.47|lthily according to the fluffy deposits. furiously silent ideas according to the furiously special theodolites wake furiously a|
+108|3|5534|626.89|instructions. blithely regular instructions according to the permanent foxes cajole blithely slyly fluffy foxes. slyly regular asymptotes cajole foxes. slyly unusual deposits |
+108|5|3142|922.27|slyly express accounts are fluffily along the blithely unusual packages. pinto beans mold furiously. furiously bold instructions are blithely deposits. quickly special accounts detect t|
+109|10|524|275.19|st the permanently final requests. carefully pending pinto beans haggle quickly slyly ironic dolphins. blithely bold deposits wake blithely. even requests cajole foxes. iro|
+109|2|4470|992.21|ake furiously packages. blithely even foxes haggle furious|
+109|4|8176|936.60|d the express accounts. even theodolites wake quickly up the furiously bold foxes. furiously regular packages use regular, bold|
+109|6|7524|374.49|sual requests. final pinto beans cajole furiously. dependencies integrate slyly even excuses. blithely special requests wake around the slyly final pinto beans. furiously pending requests wake furi|
+110|1|2782|566.46|ly sly deposits. regular deposits are; theodolites haggle furiously bold foxes. final pinto beans wake blithely furiously ironic depths. unusual, regular platelets cajole. final, ironic|
+110|3|8914|183.47|yly even foxes. carefully regular requests doubt. pending, regular requests across the blithely final theodolites hag|
+110|5|1160|932.08|ake blithely. furiously ironic accounts sleep fluffily|
+110|7|4927|196.63|theodolites. instructions haggle even packages. waters impress furiously quickly express courts. furiously ironic depths nod quickly? ironic, ironic requests sle|
+111|2|1890|321.97|st foxes. daring depths use above the furiously special ideas. ironic foxes among the carefully final theodolites are alongside of the regular depths. e|
+111|5|9587|978.65|express instructions against the furiously final grouches haggle across the blithely ironic theodolites. slyly special dependencies in place of the carefully pending |
+111|8|8069|745.33|thely bold requests. unusual packages sleep. quickly pending ideas nag furiously to the carefully final p|
+111|1|2175|496.08|s around the daringly final pinto beans use furiously against the pi|
+112|3|8509|111.42|unts. carefully ironic instructions are final, bold foxes. bold excuses run according to the unusual packages. theodolites cajole carefully according to the fluffily pending deposits? sly|
+112|6|7652|783.52| carefully among the furiously final packages. regular instructions nag. slyly pending ideas hang fluffily blithely ironic instructions. ironic deposits haggle except the quickl|
+112|9|4835|913.75|ach slyly special dependencies. furiously ironic pinto beans sleep slyly pen|
+112|2|1211|815.88|l requests integrate furiously. quickly quiet packages are carefully regular accounts. regular packages eat quickly express, ironic sheaves. quickly dogged accounts o|
+113|4|9981|396.26|ithely express pains lose bravely fluffily pending foxes. blithely ironic sauternes cajole q|
+113|7|3804|860.68|ully about the carefully even accounts: thinly even foxes are carefully. |
+113|10|522|981.41| warthogs use quickly alongside of the furiously unusual requests. final deposits are blithely acro|
+113|3|4692|141.48| blithely unusual gifts snooze against the quickly ironic packages. regular packages across the carefully regular packages bo|
+114|5|7146|447.24|ideas will nag regular accounts! carefully final requests cajole furiously quickly final tithes. furiously express instructions a|
+114|8|3062|555.12|ts. furiously regular requests run carefully thin decoys. ironic platelets sleep alongside of the slyly silent deposits. reg|
+114|1|4519|382.87|nts. ironically express dolphins dazzle blithely. special instructions wake carefully along the ideas. quickly special dolphins sleep. furiously pendi|
+114|4|2113|570.79|o beans sleep among the ironic excuses. furiously even sheaves are. never regular instructions nod.|
+115|6|1817|82.84|uffily final accounts integrate furiously along the carefully busy excuses. slyly even asymptotes doubt quickly. fluffily thin theodoli|
+115|9|983|867.45|kly. requests nag after the blithely bold packages. express requests cajole theodolites. blithely express requests sleep after the furiously regular accounts. fluffily r|
+115|2|7781|861.93|lyly ironic pinto beans affix alongside of the furiously even ideas: quickly bold warhorses sle|
+115|5|3002|81.52|efully after the quickly regular deposits. daringly pending ideas sleep even ideas. silent, re|
+116|7|6580|705.50|iously. slyly regular requests detect slyly. carefully bold packages sleep furiously carefu|
+116|10|4975|413.86|tions. regular excuses detect. ideas haggle slyly about the slyly ironic courts. ironic foxes solve. ideas affix fluffily after the special, even dependencies. final platelets according|
+116|3|8679|866.56|aphs cajole blithely regular accounts. even packages doubt; bold instructions boost quickly. fluffi|
+116|6|5632|37.30|ccounts about the special packages nag across the carefu|
+117|8|5906|706.51|into beans sleep carefully blithely bold packages. even, bold instructions use across the carefully e|
+117|1|1986|322.08|eposits. special pinto beans use fluffily across the furiously regular pinto beans. furiously regular epitaphs nag fluffily packages. special accounts a|
+117|4|2577|761.86|riously. doggedly unusual ideas boost blithely blithely regula|
+117|7|4762|552.88| run furiously ironic accounts. slyly ironic deposits haggle slyly fluffy requests. flu|
+118|9|694|744.73|refully slow requests. requests against the special pac|
+118|2|6326|325.61| packages. express, final frays affix quickly above the final asymptotes. carefully regular requests doubt quickly f|
+118|5|7806|283.27| accounts affix carefully. regular, regular packages among the brave, pendin|
+118|8|4951|976.55|s orbits. even asymptotes above the instructions wake fluffily according to the sly, final excuses. express deposits across the blithely ironic depend|
+119|10|2307|473.64| blithely unusual dolphins boost busy, express ideas. regular requests use carefully furiously ironic deposits. carefully regular packages would sle|
+119|3|1452|676.92|ular instructions was slyly. furiously bold gifts boost f|
+119|6|4955|488.93|ias are along the express requests. fluffily pending ideas nag idly against the fluffily bold instructions? foxes cajole quickly. slyly special deposits haggle slyly e|
+119|9|583|782.47|yly pending requests-- carefully special instructions haggle carefully even instructions. blithely regular theodolites detect blithely final ideas. blithely ironic deposits among the sl|
+120|1|4976|201.21|inal, regular pinto beans haggle carefully! ironic ideas unwind among the slyly regular theodolites. regular platelets kindle blith|
+120|4|7744|365.79|l, special escapades! ideas sleep slyly instructions. carefully bold requests are. even accounts cajole. final accounts use slyly |
+120|7|5329|249.61|s cajole blithely. carefully bold requests believe blithely? brave accounts above the pending, dog|
+120|10|3102|566.34|ctions; realms beside the blithely final theodolites unwind blithely packages. regular dolphins sleep carefully-- carefully express accounts wake quickly. pending depths use never courts.|
+121|2|9741|147.45|ly according to the carefully regular asymptotes. silent excuses cajole carefully against the never silent instructions. furio|
+121|6|4246|850.42|usly final instructions. theodolites are according to the permanently ironic accounts. carefully pending accounts haggle about the pending instructio|
+121|10|7670|449.39|carefully daring packages. express packages use carefully about the quickly unusual packages. special ideas along |
+121|4|8709|655.22| detect carefully along the carefully even pinto beans. gifts haggle: ideas sleep ar|
+122|3|1019|451.29| blithely regular accounts. blithely final pains sleep besides the blithely final warhorses. furiously unusual requests haggle furiously|
+122|7|2490|637.28|efully special excuses grow slyly unusual packages. carefully quiet as|
+122|1|4957|650.84|quests. quickly bold requests nag across the furiously ironic accounts. ironically express instructions detect slyly carefully ironic requests. even, un|
+122|5|2083|739.25|counts. unusual requests alongside of the regular requests are carefully stealthy instructions? regular sauternes cajole. final theodolites breach carefully at the blithely final idea|
+123|4|9881|107.03|fully bold deposits detect slyly pending instructions. slyly special ideas detect blithely. slyly fluffy instructions hinder|
+123|8|5638|818.19|thely even pinto beans. furiously regular asymptotes affix furiously. regular, ironic tithes integrate quickly. blithely regular requests breach finally. decoys alon|
+123|2|2692|217.01|he ironic accounts nag fluffily after the bold, pending theodolites. blithely final ideas sleep carefully according to the blithely ironic foxes. regular requests are. furiousl|
+123|6|5311|149.65|eposits cajole according to the carefully pending packages. furiously final epitaphs solve alongside of the even requests|
+124|5|7102|901.98|ily accounts. furiously busy theodolites above the deposits thrash above the blithely final foxes. express instructions nod slyly furiously busy packages. special asymp|
+124|9|3969|908.64|l epitaphs. packages cajole among the furiously regular requests. closely|
+124|3|9295|882.54|s along the accounts poach quickly ironic deposits. even, final excuses thrash carefully about the express, special pains. carefully careful accounts breach slyly|
+124|7|9416|822.78|ously. theodolites affix around the slyly bold packages. even, ironic packages are carefully pains. furiously unusual requests sleep blith|
+125|6|2263|358.45|e. ironic, regular requests cajole fluffily along the even ideas. final ideas wake blithely. blithely bold |
+125|10|8276|668.65|nd the carefully express requests. slyly regular requests haggle. blithely unusual platelets solve fluffily fluffily regular|
+125|4|2600|455.70|ounts. thinly special accounts cajole carefully. even, special accounts after|
+125|8|5546|806.66| to the unusual courts are deposits! final, final pinto beans solve slyly. ironic accounts boost fluffily. furiously pending d|
+126|7|2647|221.89|lyly final pinto beans across the regular, even courts use slyly slyly pending braids! unusual requests along the furious|
+126|1|2373|194.38|fter the ideas. blithely daring sheaves print furiously among the blithely final packages. iron|
+126|5|1532|451.61|refully alongside of the quickly bold excuses. enticing, bold |
+126|9|5458|929.43|leep to the furiously special accounts. furiously final courts |
+127|8|7658|712.33|al pinto beans! slyly ironic excuses boost after the packages. express foxes integrate carefully. pending, regular theodolites |
+127|2|1467|237.98|regular accounts! quickly ironic packages haggle according to the accounts. carefully ironic |
+127|6|8281|3.14|ts above the furiously pending asymptotes cajole after the deposits. slyly ironi|
+127|10|8894|73.42|fter the sometimes special courts sleep about the slyly unusual reque|
+128|9|6982|425.29|ironic asymptotes. fluffily ironic packages use. ironic, regular ideas are in place of the quickly silent deposits. final, bold gifts across the ironic, regular pac|
+128|3|7602|224.49|xcuses. blithely unusual theodolites use slyly carefully even warthogs. slyly even dugouts haggle slyly final, express pinto beans. furiously bold packages thrash requests? slyly unusual packages |
+128|7|3766|947.16|arefully regular packages boost regularly. accounts are according to the blithely even dependencies. slyly silent accounts doubt slyl|
+128|1|7023|875.78| furiously quickly regular pinto beans. always special requests are. quickly regular deposits are furiously. slyly unusual theodolites haggle evenly; furiously special deposits wa|
+129|10|5721|129.69|ully express requests above the ironic, final requests cajole slyly along the quickly special packages. sl|
+129|4|7242|200.26|es across the furious escapades wake quickly slyly e|
+129|8|5299|330.59|final sentiments affix atop the silent foxes. busy pinto beans cajole. slyly final pinto beans haggle against the carefully expres|
+129|2|1968|27.22|ealthy, ironic deposits. slyly ironic pinto beans are blithely pinto beans. blithely ironic |
+130|1|4928|223.38|ths. slyly even theodolites detect according to the slyly final courts. carefully unusual deposits ar|
+130|5|6909|275.58|lly unusual accounts try to boost along the special packages. furiously bold requests x-ray blithely ironic waters. slyly unusual orbi|
+130|9|4850|442.81|ully regular deposits snooze. slyly silent foxes detect furiously furiously bold requests. slyly regular accounts breach. blithely bli|
+130|3|7387|883.99|aggle furiously. even ideas hinder deposits. even, final ideas are. unusual theodolites after the special, express foxes haggle carefully pending accou|
+131|2|3263|211.70|sits sleep quickly regular multipliers. slyly even platelets cajole after the furiously ironic deposits. slyly ironic requests should have to cajole: bl|
+131|7|125|861.84|l accounts grow quickly-- slyly ironic requests haggle? quickly express pinto bean|
+131|2|5138|572.43|grouches run with the carefully even packages. ironic, even deposits run slyly along the packages. special dependencies among the regular |
+131|7|8945|613.09| are carefully along the quickly final theodolites. packages after the quickly pending package|
+132|3|3092|687.29|y special decoys against the ideas affix against the sly|
+132|8|1904|925.73|the regular foxes wake ironic deposits. ironic, special requests use blithely instructions! final requests hang. blithely regular deposits haggle. ir|
+132|3|7441|357.06|ests. furiously unusual requests wake furiously. quickly unusual depos|
+132|8|5303|353.06|ep blithely after the sly accounts. slyly express dolphins cajole amon|
+133|4|5727|49.17|boost blithely across the ironic, regular instructions. packages use slyly unusual requests. bold accounts above the fu|
+133|9|404|478.18|ly ironic requests run instead of the blithely ironic accounts? regular ideas use fluffily: even, express packages sleep abov|
+133|4|4568|57.48|dolites. ironic accounts are blithely pinto beans. regular pinto beans haggle beneath|
+133|9|2813|277.26|s. pending, final accounts haggle blithely furiously pending deposits! carefully unusual attainments integrate. blithely bo|
+134|5|8879|848.14|lites. slyly final foxes after the bold requests cajole carefu|
+134|10|9013|102.99|pendencies. furiously express warthogs cajole furiously ironic, regular asymptotes. bold deposits boost among the furiously even theodolites. regular instructions integrate carefully |
+134|5|852|927.45| sleep unusual, express packages. unusual sentiments are furio|
+134|10|6270|388.28| to the furiously pending deposits nag along the slyly express asymptotes. slyly silent accounts shal|
+135|6|6940|465.82|ding foxes cajole. even dugouts haggle busily. fluffily pending packages about the express excuses boost slyly final packages. blithely express ideas cajole about the carefu|
+135|1|2443|9.83|atterns. pending, special deposits are furiously. express, regular deposits integrate quickly. unusual gifts cajole blithely stealthily pending deposit|
+135|6|7453|698.42|ven accounts. slyly final instructions nag slyly around the regular, unusual packages. slyly sp|
+135|1|2771|306.43|old deposits. furiously express instructions boost. pending dolphins use requests. slyly regular packages cajole quickly final ideas. pending, regular ideas nag carefully even, express pla|
+136|7|2237|548.19|ond the silent accounts haggle above the blithely regular packages|
+136|2|6068|806.19|structions. ironic theodolites haggle according to the final, daring pearls. carefully ironic somas are silently requests. express pa|
+136|7|8979|387.57|ans. express pinto beans wake carefully among the slyly ironic foxes: carefully final pinto beans haggle blithely. pending, final deposits promise furiously|
+136|2|9617|525.81| across the carefully pending warthogs. close, regular packages are quickly after the never ironic foxes. accounts sleep quickly along the furiously regular re|
+137|8|9057|302.26|slyly about the regular instructions. even, ironic theodolites use carefully around the even decoys. unusual, pending dolphin|
+137|3|4078|441.11|packages. blithely unusual sentiments should are. furiously regular accounts nag quickly carefully special asymptotes! idly ironic requests dazzle bold requests. carefully expres|
+137|8|467|371.85|ly special accounts detect carefully. furiously ironic deposits nag express packages. slyly quiet |
+137|3|7850|187.31|atelets sublate fluffily. enticingly unusual packages boost according to the blithely ironic foxes. pending requests mold sly|
+138|9|133|576.96|regular, final deposits maintain slyly even requests. regularly furious deposits use above the stealthy requests. ironic deposits are. carefully final frays are carefully. carefu|
+138|4|2535|885.35|lar deposits. courts sleep carefully. furiously express ideas boost furiously after the final, regular foxes. furiously bold deposits are. express accounts haggle blithely. |
+138|9|7907|119.83|epitaphs? quickly express foxes use pending accounts. special packages cajole blithely among the quickly unusual accounts? boldly ironic packages across the slyly ironic senti|
+138|4|967|309.03|pendencies integrate against the unusual pains. carefully unusual theodolites wake quickly across the deposits. blithely regular deposits alongside of the carefully regular deposits|
+139|10|2886|285.75|fully ironic requests according to the quickly final idea|
+139|5|9255|684.61|ickly furiously regular excuses. boldly express deposits sleep. ideas nag above the silent dependencies. slyly regular packages wake furiously. requests are carefully. quickly final fox|
+139|10|1042|972.23|gular, regular theodolites. regular asymptotes haggle carefully according to the permanently even deposits. slyly special account|
+139|5|3285|690.00|xpress pains. quickly regular ideas after the special, bold excuses wake furiously final ideas. slyly bold accounts nag packages. ironically regular|
+140|1|2379|501.05|of the silent, bold courts. slyly regular dependencies haggle. fluffily special deposits cajole carefully. quickly ironic depos|
+140|6|3533|781.45|ayers. carefully ironic pinto beans nod carefully furiously regular pinto beans. slyly ironic requests after the carefully regular packages are about the blithel|
+140|1|304|45.84|ing requests. carefully unusual foxes are final requests. slyly regular accounts wake permanently. quickly ironic theodolites hagg|
+140|6|7346|429.52| special pinto beans wake carefully unusual warthogs! furi|
+141|2|6776|293.63|fluffily unusual courts sleep. close pinto beans haggle quickly after the carefully ir|
+141|8|1660|139.18|egular accounts. enticingly bold theodolites eat slyly across the never ironic platelets. theodolites wake bli|
+141|4|7628|838.08|sly about the pinto beans. blithely ironic ideas sleep. foxes are quietly among the pinto beans. carefu|
+141|10|90|810.68|e doggedly regular ideas. foxes haggle slyly. slyly regular theodolites across the carefu|
+142|3|9219|897.49|thlessly special requests sleep blithely about the bold deposits. express, ironic instructions wake. final packages are blithely. deposits are carefully furiously even deposits. furiously regular a|
+142|9|13|334.33|are blithely blithely brave requests. slyly regular theodolites are furiously. blithely ironic dependencies haggle blithely. furiously unu|
+142|5|3076|860.55|gular requests about the pending packages wake furiously dogged accounts. th|
+142|1|3858|854.08|efully special deposits. blithely bold pinto beans haggle. slyly final ideas boost blithely. finally special requests mold along the blithely express packages. entic|
+143|4|7326|960.64|the slyly pending requests cajole quickly blithely regular platelets. even requests boost carefully. ironic, final instructions above the regular courts boost a|
+143|10|3923|741.01|le quickly furiously silent ideas. carefully regular requests ar|
+143|6|7152|772.24|fully furious accounts. final asymptotes cajole regular requests. carefully regular courts are quickly. slyly ironic ideas above the carefully regular requests wake|
+143|2|1952|199.37|l accounts are quickly after the unusual packages. regular accounts wake among the quickly even accounts. even, ironic|
+144|5|6295|457.37| pinto beans promise across the blithely bold packages. express, regular accounts play around the slyly silent deposits. specia|
+144|1|494|849.96|uriously ironic pearls wake idly furiously even pearls. foxes impress slyly busily express requests. carefully slow somas wake quick|
+144|7|1799|713.88|yly final requests. packages are. carefully daring accou|
+144|3|5427|361.83| foxes integrate carefully. deposits cajole fluffily. pending deposits kindle slyly carefully regular packages. even, thin accounts according to th|
+145|6|11|641.67| slyly regular packages are slyly carefully special dolphins. unusual braids use furiously about the final courts. slyly special|
+145|2|3838|568.91|ss, final asymptotes are. furiously express accounts run. furiously express dependencies eat carefully blithely ironic theodolites. closely ironic foxes among the silent asymptotes cajole|
+145|8|1344|422.60| dependencies. even patterns detect slyly after the ironic deposits. ironically even ideas wake slyly. even packages against the blithely express accounts haggle furiously carefully regular|
+145|4|4332|894.57|are carefully above the quickly silent deposits. evenly bold reque|
+146|7|2726|231.15|uriously after the fluffy accounts. furiously bold deposits cajole. requests might engage. quick accounts wake carefu|
+146|3|4628|123.91|ly across the dependencies. daringly ironic deposits are furiously; requests are. quickly regular accounts hang. carefu|
+146|9|5893|858.59|sual instructions believe. fluffily unusual warhorses nag. unusual dependencies sleep. slow sheaves haggle furiously. carefully ironic dependencies cajole slyly against the accounts. |
+146|5|9687|882.37|packages? ideas affix slyly even accounts: express requests wake slyly carefully special depths. ironic in|
+147|8|7442|939.14|sts against the furiously unusual instructions integrate ironic accounts. slyly final pinto beans sleep blithely carefully final asymptotes. slyly ironic warhorses befor|
+147|4|7647|102.19|refully regular orbits about the furiously express asymptotes haggle carefully according to the blithely regular ideas. blithely express excuses around the furiously |
+147|10|1596|466.37|ole. slyly final packages do haggle quickly. unusual accounts across the pending pinto beans was furiously according to the furiously brave deposits. pending deposits along the regular request|
+147|6|4235|235.91|ar pinto beans. regular instructions sleep carefully after the furiously blithe accounts. slowly pending ideas could nag careful, even accounts. attainments use slyly quickly|
+148|9|8007|177.40|final requests-- slyly regular theodolites haggle carefully across the blithely final dependencies. slyly even requests about the carefully even accounts sleep |
+148|5|1904|774.56|y even pinto beans. fluffily ironic packages sleep slyly. permanently brave requests boost furiously packages. boldly ironic deposits across the carefully bold pinto b|
+148|1|5393|264.09|ses. slyly pending packages haggle fluffily fluffily even instructions. fluffily regular packages are carefully about the furiously even asymptot|
+148|7|2957|250.98|. requests boost above the bold, special foxes. blithely regular platelets serve blithely slyly final ideas. carefully special idea|
+149|10|959|679.10|y to sleep carefully ironic requests. even, regular dependencies haggle. slyly unusual foxes haggle along the instructions. quickly even accounts nag furiously special accoun|
+149|6|7283|201.03|usly bold instructions. regular, final deposits alongside of the furiously ironic platelets are slyly even instructions. carefully bold accounts are. ironic, regular requests nag furious|
+149|2|7392|266.53|es detect along the regular instructions. bold ideas boost slyly. quickly unusual accounts doubt. carefully even foxes thrash slyly silent, ironic dolphins: Tiresias must wake |
+149|8|4104|312.37|ly express excuses. bold pinto beans boost blithely across the bold, final pinto beans. final deposits haggle carefully from the|
+150|1|8091|524.71|sleep furiously furiously bold warthogs. furiously express gifts according to the regularly silent sentiments boost within the f|
+150|7|2721|814.79|dependencies. special accounts wake carefully furiously regular accounts. regular accounts haggle along the express instructions. express pinto beans along the express, bold deposits run |
+150|3|3172|33.71| about the silent ideas. fluffily final requests impress. slyly final requests wake carefully about the slyly express foxes. slyly regular warthogs sleep fur|
+150|9|1640|327.34|slyly even deposits alongside of the furiously even accounts detect boldly quickly regular accounts. final accounts kindle carefu|
+151|2|391|281.90|dolites. boldly ironic packages cajole fluffily regular instructions. regular, ironic accounts are blithely. ironic accounts are alongside of th|
+151|9|253|840.06| haggle. somas are carefully. slyly regular requests sleep blithely atop the thinly express deposits. stealthily express packages cajole daringly express requests. carefully special requests after t|
+151|6|1484|71.68|riously final requests sleep according to the regular deposits? slyly ironic ideas wake furiously. quickly even theodolites use fluffily. regular, unusual courts according to the regular |
+151|3|9417|244.06| foxes along the hockey players are slyly about the blithely even packages. unusu|
+152|3|1396|164.60|d the instructions. carefully pending accounts haggle fluffily ruthless instruc|
+152|10|2653|432.12|carefully pending requests. quickly ironic requests haggle carefully special theodolites. blithely special requests aga|
+152|7|3599|77.38| quick excuses according to the pending, ironic requests snooze carefully slyly even foxes: slyly regular instru|
+152|4|1283|142.73|olites above the furiously even requests dazzle blithely against the busy, regular pains. furiously blit|
+153|4|4695|539.86|atelets. dolphins haggle blithely carefully ironic deposits? express, final accounts wake about the requests. even deposits should use quickly. regular,|
+153|1|8464|680.14|cording to the final instructions. carefully fluffy asymptotes haggle carefully |
+153|8|2916|685.52|ully express deposits boost daringly packages. furiously ironic accounts sleep slyly ironic instructions. special deposits integrate blithely. |
+153|5|6397|285.92|furiously special platelets haggle quickly even, bold pinto beans. blithely close pinto beans boost around the furiously regular packages. quickly express requests cajole.|
+154|5|3961|474.19|quickly pending requests nag express dependencies. furiously unusual requests about the regular, pending packages wake according to the ironic packages! theodolites wake about the unusual, regula|
+154|2|3231|829.29|ins along the packages use carefully requests. furiously unusual packages kindle fluffily quick|
+154|9|7315|960.19|uickly regular dolphins ought to believe among the q|
+154|6|1682|160.31|refully except the sly, even requests. careful ideas haggle after the slyly regular foxes: slyly special packages at the slyly regular deposits wake carefully theod|
+155|6|2893|222.02|. bold packages are toward the silent pinto beans. quickly fin|
+155|3|7077|413.24|lar instructions against the furiously unusual instructions breach furiously for the bold, even platelets. ironic accounts must have to are quickly across the |
+155|10|3029|413.02|the carefully ironic asymptotes. even, unusual accounts sleep furiously about the blithely regular ideas. quickly re|
+155|7|977|751.45|quickly silent deposits doubt above the unusual instructions. special r|
+156|7|4755|453.69|e of the excuses. slyly even theodolites boost about the slyly final foxes? requests after the carefully regular platelets sleep above the furiously pending d|
+156|4|7228|994.19|odolites wake quickly slyly final dinos. requests cajole slyly along the instructions. furiously regular deposits cajole slyly blithely ironic instructions. instructions wake. blithely even pinto be|
+156|1|3043|252.66|lithely express, silent decoys. bold, special requests along the carefully even accounts|
+156|8|3191|9.87|ar instructions-- quickly special deposits wake fluffily about the blithely e|
+157|8|5414|369.44|ong the carefully bold ideas boost across the regular, ironic requests. ironic fo|
+157|5|763|568.46|cial packages boost along the ideas. packages sleep slyly express packages. ironic, bold requests|
+157|2|3718|901.53|, ironic foxes. blithely even foxes wake about the carefully special req|
+157|9|3400|288.41|encies integrate carefully even accounts. regular, regular sentiments are against the slyly regular deposits-- even, even ideas use inside the carefull|
+158|9|2845|408.72|y. slyly final pinto beans believe fluffily pending, regular deposits. final, unusual ideas according to t|
+158|6|8715|452.31|instructions along the ironic, final requests are fluffily regular deposits. regular deposits cajole carefully about the silent instructions|
+158|3|4465|837.16| wake carefully in place of the furiously express deposits. slyly regular instructions engage. fluffily f|
+158|10|4251|431.90|etly special accounts boost carefully final multipliers. carefu|
+159|10|9200|356.66|ccording to the furiously final accounts. carefully fluffy foxes wake idly against the quickly final requests. evenly even pinto beans must have to are against the carefully regular de|
+159|7|3585|629.29|g platelets wake furiously slyly bold deposits? slyly regular accounts across the stealthily ironic accounts cajole along the special, ironic pearls. fluffily regular pinto|
+159|4|6085|171.40|ross the blithely special deposits are quickly carefully ironic Tiresias. quickly regular deposits was furiously. unusual accounts affix blithely about the regular deposits. asymptotes ab|
+159|1|3717|790.87|y blithe dependencies. final accounts haggle furiously. even, special asymptotes|
+160|1|2434|525.73|lithely. furiously silent theodolites after the ca|
+160|8|8324|999.93|ly final instructions. closely final deposits nag furiously alongside of the furiously dogged theodolites. blithely unusual theodolites are furi|
+160|5|6034|733.59| furiously against the final instructions. silent accounts sleep blithely after the boldly final requests. ex|
+160|2|6872|872.20|ions are carefully. carefully express foxes nag slyly before the carefully final excuses. accounts after the furiously ironic packages are furio|
+161|2|9365|790.03|scapades. packages use. slyly final accounts haggle across the quickly final th|
+161|10|8421|394.05|cial ideas. ironic instructions eat blithely slyly special packages. furiously final packages alongside of the furiously final instructions boost carefully against the quickly |
+161|8|9679|688.47|ns. blithely express requests sleep slyly foxes. blithely unusual ideas |
+161|6|679|893.72| the fluffily final requests. ironic, pending epitaphs affix slyly. qui|
+162|3|315|923.04| ideas. carefully final dugouts will have to wake quickly regular asymptotes. express grouches unwind carefully after the regula|
+162|1|2604|104.20|usly regular excuses. silent, even sheaves are according to the regular requests. packages grow blithely slyly regular accounts. ca|
+162|9|7014|236.07|cording to the stealthily fluffy theodolites. carefully unusual excuses around the regular deposits cajole slyly amo|
+162|7|4381|824.36|as across the furiously ironic notornis print blithely alongside of the final, pending deposits. fluffily express deposits slee|
+163|4|9241|955.81|cial dolphins. furiously bold foxes could have to use. never sly accounts cajole fluffily about the unusual, special pinto beans. pending, even requests around the quickly special deposits use f|
+163|2|3427|499.51|ithely bold packages integrate slyly quiet pinto beans. carefully even deposits boost slyly about the furiously fluffy packages. evenly regular dependencies wa|
+163|10|5323|920.75|sly even theodolites against the carefully bold packages wake final pinto beans. furiously pending deposits dazzle furiously. blithely exp|
+163|8|9676|573.48|ending accounts haggle blithely ironic, even packages. carefully pending packages wake carefully across the ruthlessly pending accounts! pinto beans wake. slyly final deposits boost slyly. fluffily|
+164|5|1295|341.95| bold instructions cajole slyly ironic deposits. quickly ironic foxes are carefully final, bold theodolites. ironic deposi|
+164|3|2134|84.02|ns believe. carefully express theodolites impress. carefully fina|
+164|1|3245|814.67|brave accounts cajole according to the final platelets. furiously final dolphins across the furi|
+164|9|3028|64.89|fully furiously regular requests. furiously bold orbits serve about the regular packages? carefully final deposits p|
+165|6|4424|943.82|ular requests. regular accounts cajole against the blithely ironic deposits. blithely even packages cajole. furiously final deposits cajole. thinly pending deposits hagg|
+165|4|5534|717.83| quickly regular deposits above the fluffily thin deposits haggle furiously against the quickly final depend|
+165|2|3780|730.28| furiously quickly regular foxes. pending requests engage evenly blithel|
+165|10|6114|210.84|foxes. foxes haggle. dolphins use carefully according to the fluffily regular packages. blithely special accounts according to the slyly final frets breach blithely after the care|
+166|7|6527|309.00|lly. dependencies haggle carefully at the slyly special packages. regular, final packages|
+166|5|6508|714.49|y express deposits cajole furiously above the carefully even theod|
+166|3|9364|581.52|pinto beans. pinto beans cajole furiously carefully special requests-- quickly |
+166|1|6713|631.58| sleep carefully. quickly even deposits run carefully fluffily ironic orbits. ironic deposits wake furiously. close sheaves along the special packages sleep carefully special instr|
+167|8|4933|666.70|ular deposits among the even dolphins are quickly express accounts. final, ironic theodolites cajole closely. th|
+167|6|5789|524.27| are furiously final, even dugouts. ironic, regular packages nag fu|
+167|4|4756|336.75|es are carefully along the carefully express tithes. furiously even deposits cajole slyly slyly regular deposits. bold excuses about the carefully ironic requests sleep blithely instructions|
+167|2|6748|704.97|t the silent ideas are blithely carefully even packages; blithely|
+168|9|347|394.83|hely blithely final theodolites. blithely final deposits among the quickly even ideas haggle about the blithely bold d|
+168|7|1281|771.90|, pending packages. ironic pinto beans use carefully. fluffily bold deposits|
+168|5|9089|508.37|ests are always. regular ideas sleep fluffily; special, express instructions cajole slowly. pending platelets boost furiously against the bold, even instructions. bold instructi|
+168|3|7519|963.43|requests above the quickly regular deposits use carefully aft|
+169|10|6914|619.53|uickly along the dependencies. furiously pending notornis cajole at the carefully special attainments. carefully ironic packages impress slyly care|
+169|8|6589|947.03|gside of the quickly regular asymptotes. quickly even theodolites against the theodolites promise express requests. ironic accounts wake careful|
+169|6|6731|713.35| the quickly special excuses wake blithely alongside of the carefully silent accounts. regular dolphin|
+169|4|7691|476.19|slyly alongside of the warthogs. fluffily even instructions poach under the slyly pending packages. blithely silent deposits use across the fur|
+170|1|7516|581.65| pinto beans. unusual ideas was fluffily. excuses cajole carefully final dependencies. platelets nag quickly according to the furiously ironic requests. carefully regular dependenci|
+170|9|838|667.16|orges do sleep furiously. fluffily furious requests among the final requests sleep after the slyly bold ideas? regular pinto beans might ha|
+170|7|6498|251.19| fluffily regular accounts integrate. blithely even packages cajole fluffily. furiously ironic excuses haggle by the finally final requ|
+170|5|6593|202.07|ep blithely final packages. quickly bold pains cajole carefully across the somet|
+171|2|8217|859.60|ress deposits. carefully special requests are furiously final requests. accounts cajole carefully blith|
+171|1|2311|864.96|s are along the blithely final deposits. regular asymptotes nag slyly against the requests. accounts cajole carefully carefully |
+171|10|8561|22.69|y close ideas are quickly silently regular packages. even, silent requests wake against the slyly special dependencies; regular accounts sleep doggedly furiously final pinto beans. slyly unusual pac|
+171|9|7589|935.29|s above the theodolites wake slyly along the carefully unusual dependencies. carefully express theodolites a|
+172|3|9799|184.96|ts. slyly even asymptotes nag blithely regular accounts. final platelets cajole furiously slyly bold packages. ironic accounts sleep slyly. pendi|
+172|2|8333|920.74|ronic foxes. quickly unusual accounts cajole blithely. blithely bold deposits cajole. blithely close pinto beans cajole requests. quickly express excuses around the quickly even deposits nag agai|
+172|1|3589|437.86|posits should have to boost furiously near the unusual ideas. final packages cajole blithely. carefully final deposits boost carefully. carefully special attainments boost quickly af|
+172|10|1661|687.13|y among the slyly even requests. ideas according to the slyly pending dinos print quickly slyly ironic foxes. pending, even excuses dazzle car|
+173|4|2536|353.84|ons-- final, silent dependencies sleep across the special, special excuses. furiously even accounts must have to mold after the ironic accounts. reque|
+173|3|8307|70.22|alongside of the furiously even packages. furiously final requests snooze blithely alongside of the carefull|
+173|2|6050|683.78|e after the slyly regular ideas. unusual pinto beans cajole even asymptotes-- silent, stealthy requests after the even accounts haggle blithely regular instructions. slyly ev|
+173|1|6162|877.84|es. slyly bold requests after the blithely regular dependencies cajole slyly even ideas. unusual deposits integrate about the final somas. |
+174|5|2103|681.95|sual, express requests wake furiously ruthless, final accounts. carefully ironic somas dazzle furiously. unusual asymptotes sleep-- patterns about the furiousl|
+174|4|6795|143.48|regular theodolites. special accounts integrate across the carefully ironic Tiresias. blithely even platelets detect. foxes about t|
+174|3|111|135.46| express packages-- quickly unusual courts lose carefully requests. bold accounts solve about the theodolites; pinto beans use. ironic foxes|
+174|2|8404|126.20|nding accounts mold furiously. slyly ironic foxes boost express sheaves. daringly final packages along the stealthy dependencies are blithely ironic requests. furiously pending pin|
+175|6|5515|487.68|ages sleep against the Tiresias. slyly pending packages print slyly above the evenly ironic dolphins. furiously ironic packages use f|
+175|5|7522|784.93| affix. quickly final theodolites haggle furiously after the slowly even pinto beans. furiously final packages use slyly. slyly regular reque|
+175|4|8501|706.61|int above the instructions. furiously regular requests integrate blithely according to the instructions. slyly pending foxes are asymptotes. slyly ruthless accounts wake. r|
+175|3|9456|978.56| regular packages. carefully ironic packages use. blithely ironic accounts among the pending, |
+176|7|7180|179.09|riously final requests. accounts doubt blithely regular somas. slyly even platelets are. theodolites across |
+176|6|3589|157.38|inal excuses. express deposits haggle carefully even deposits. carefully unusual requests haggle along the fluffily bold deposits. even, final requests affix. furi|
+176|5|5407|947.51|ending accounts eat carefully instructions. carefully pending packages detect slyly express accounts. foxes wake fluffily across th|
+176|4|1783|861.63|g the carefully special platelets. dogged, ironic asymptotes wake requests. regular excus|
+177|8|1239|44.75|requests use furiously regular, final requests. regular requests on the pending, ironic deposits use slyly among the excuses. carefully regular sheaves are.|
+177|7|4349|63.36|osits sleep among the fluffily unusual instructions. ironic dolphins cajole. furiously bold deposits sleep carefully. even, unusual accounts|
+177|6|9872|252.42|sual platelets. bold foxes affix furiously. pending, pending accounts lose furiously. pending platelets along the unusual, even foxes wake regular, even theo|
+177|5|4727|859.82|es are. slyly ironic packages haggle around the slyly bold deposits. bold foxes haggle blithely. f|
+178|9|4231|558.56|deposits. patterns use against the furiously unusual accounts. accounts wake carefully above the careful|
+178|8|1919|362.26| ironic dependencies. blithely regular packages detect fluffily special theodolites. regular instructions poach-- ironic deposits along the final requests |
+178|7|6836|864.93|y. ideas integrate regular pinto beans. special foxes wake above the slyly ironic asymptotes. quickly ironic ideas sleep. silent dependencies against the slyly bold packa|
+178|6|6922|475.18| regular patterns. fluffily express accounts about the furiously bold deposits cajole slyly about the furiously silent foxe|
+179|10|6956|444.38|g the furiously careful excuses haggle quickly thinly special Tiresias. furiously express foxes after the quickly regular deposits sleep ironic packages|
+179|9|1954|372.75|even dependencies print carefully. deposits boost blithely about the ironic, ironic accounts. express, regular deposits are. bli|
+179|8|2710|277.15|d the frets. pending packages doze quickly across the furiously regular deposits. pending, even deposits impress ironic ideas. quickly regular r|
+179|7|4776|8.39|sly special pinto beans. pinto beans cajole. carefully unusual ideas around the silent accounts are blithely carefully ev|
+180|1|2467|440.25| instructions affix. regular packages cajole quickly. carefully express asymptotes use furiously around the pendin|
+180|10|1108|934.59|hinly after the regular, unusual asymptotes! carefully regular theodolites sublate. regular, ironic deposits against the regular pinto beans nag ca|
+180|9|724|426.16|e, regular accounts. furiously final ideas are furiously above the bold, silent asymptotes. sly instructions are carefully quickly final sentiments. furiously ironic foxes cajole bold, exp|
+180|8|5899|864.83|hin the carefully furious pinto beans. furiously ironic pinto beans use slyly above the even instructio|
+181|2|2416|844.44|ully. theodolites throughout the blithely unusual pinto bea|
+181|2|3242|886.53| express ideas nag carefully brave accounts. slyly express deposits would affix. final, special requests against the slyl|
+181|2|215|938.29| accounts boost furiously furiously blithe theodolites. slyly bold requests unwind special, unusual requests. furious ideas boost quickly pending |
+181|2|1122|657.25|lyly fluffily pending foxes. fluffily ironic pains haggle. thinly regular requests against the deposits affix after the never ev|
+182|3|9699|535.27|ound the furiously regular foxes. pending requests dazzle along |
+182|3|960|519.36|arefully pending dependencies are always slyly unusual pin|
+182|3|6243|741.46|accounts are slyly. furiously ironic requests haggle. express, special instructions against the ironic theodolites use s|
+182|3|6146|365.00|s. blithely express theodolites sleep blithely alongside of the requests?|
+183|4|30|875.44|slyly. furiously regular instructions cajole slyly about the pending, final theodolites. blithely final deposits cajole fluffily alo|
+183|4|4482|424.86|es. depths affix fluffily. bold instructions haggle. ruthless instructions must have to boost|
+183|4|8707|884.26|posits wake. blithely pending requests nag furiously alongside of the p|
+183|4|333|678.16|ost final, final theodolites. slyly bold foxes dazzle carefully furiously regular accounts. regular, sly instructions about the furiously regular excuses nag blithely abou|
+184|5|7069|449.45|nal ideas. blithely final ideas haggle against the pinto beans. qu|
+184|5|9193|576.88|uickly quick dependencies could detect furiously. final packages p|
+184|5|6400|551.90|ss dependencies. quickly even pinto beans are. express accounts a|
+184|5|831|186.84|kages cajole carefully furiously ironic instructions. deposits use bl|
+185|6|1475|538.58|unts hinder slyly. quickly express ideas sleep carefully |
+185|6|6244|213.04|ly unusual decoys are furiously quickly regular packages. bold, ironic foxes cajole fluffily around|
+185|6|7245|426.74|sleep blithely alongside of the regular excuses. even, regular|
+185|6|8014|510.23|lithely even ideas. regular platelets wake carefully ironic, special instructions! final pearls above the fluffily quiet ideas use furiously about the |
+186|7|1095|252.84|. carefully regular pinto beans according to the blithely close asymptotes haggle carefully special requests. packages cajole up the furi|
+186|7|1945|18.75|nic foxes boost carefully careful packages: express, fluffy dolphins nag quickly ironic packages. slyly bold requests nag amon|
+186|7|8838|729.42|ing asymptotes. enticingly regular theodolites mai|
+186|7|7898|812.37|ctions sleep silently carefully bold platelets. furiously ironic dependencies boost. regular de|
+187|8|8656|238.66|tes use along the even foxes? final foxes haggle pinto beans. slyly ironic theodolites are according to the deposits. furiously pending reques|
+187|8|4945|316.64|eposits boost quickly bold requests. furiously regular ideas boost boldly. special, express dependencies are fluffily slyly reg|
+187|8|3183|362.75|t the bold platelets. fluffily express platelets cajole fluffily along the always bold requests. blith|
+187|8|7440|989.71|e slyly against the slyly regular pinto beans. requests haggle carefully around the asymptotes. regular, regular asymptotes use furiously some|
+188|9|4835|771.95|pains are fluffily about the fluffily pending asymptot|
+188|9|2620|331.70|elets nag slyly regular pinto beans. slyly even dugouts above the blithely unusual theodolites su|
+188|9|730|713.62|nag against the final accounts. blithely pending attainments lose. silent requests wake quickly. careful|
+188|9|5430|920.20|uriously. special, regular instructions sleep along the accounts. quickly even foxes across the regular theodolites hang u|
+189|10|1305|392.50|packages. regular, unusual accounts lose furiously fluffily regular platelets. requests sleep carefully dependenc|
+189|10|8777|573.22|beans cajole slyly ironic requests. requests are quickly unusual, even packages. ironic frays haggle. blithely pending requests nod slyly. express, silent requests against the slyly unusual |
+189|10|6369|946.07|ts hinder slyly regular, unusual foxes. final sentiments use above the slyly r|
+189|10|2505|593.23| the deposits. special deposits sleep-- furiously regular sauternes solve furiously across the furiously regular pack|
+190|1|535|621.53|unts must have to haggle; slyly ironic accounts affix slyly alongside of the carefully even accounts. furious deposits haggle quietly among the packages. blithely |
+190|1|5845|608.91| haggle along the carefully unusual pinto beans. quickly final accounts sleep a|
+190|1|4579|396.60|inal, final foxes. regular, even deposits wake blithely! silent, regular packages integrate according to the slyly regular deposits. ironic, ironic notornis ha|
+190|1|2861|458.00|s cajole slyly across the daring, final pinto beans. carefully quiet requests affix along the a|
+191|2|8310|521.06|the slowly regular deposits. special accounts along the quickly unusual|
+191|3|1693|464.46|y. slyly unusual waters across the special pinto beans nag blithely according to the busy deposits. carefully regular accounts are against the regular accounts; perman|
+191|4|597|126.96|ly final accounts should have to boost above the doggedly express pinto beans. blithely regular packages cajole furiously bold requests. fluf|
+191|5|9673|119.41|press deposits kindle theodolites! slyly final dependencies against the blithely final packages sleep slyly regular requests. theodolites cajole furiously quickly bold a|
+192|3|606|198.69|inal platelets integrate regular accounts. accounts wake ironic, silent accounts. slyly unusual accounts kindle carefully-|
+192|4|2656|916.16|uickly. slyly bold ideas affix special, close theodolites. ironic, pending requests use carefully. blithely regular |
+192|5|1811|359.59|ly carefully special asymptotes. furiously pending instructions haggle blithely bravely pending requests. carefully f|
+192|6|8305|861.23|s against the carefully regular foxes haggle fluffily across the pending accounts. blithely final packages sleep after the furiously ironic theodolites. quickly bold r|
+193|4|6184|335.98| quickly bold deposits cajole furiously ruthless courts. carefully|
+193|5|4762|606.19|ns sleep against the furiously regular asymptotes. carefully even asymptotes across the daringly final packages sleep fluf|
+193|6|385|571.71|ons. slyly ironic deposits wake furiously ironic, unus|
+193|7|9791|478.52|quests. carefully even requests use regular excuses. pending accounts are. furiously even pinto beans haggle furi|
+194|5|4289|662.17|ic Tiresias serve along the ironic, express accounts. quickly final requests are slyly among the carefully special requests. accounts boost.|
+194|6|377|430.21|efully instead of the special ideas. fluffily unusual asymptotes cajole blithely after the regular ideas. final accounts along the silent ex|
+194|7|5294|913.46|indle fluffily despite the carefully silent instructions. furiously regular hockey players cajole slyly unusual accounts. furiously regular realms cajole furiously according to the e|
+194|8|7890|79.40|ctions sleep. carefully unusual theodolites should wake furiously across the deposits-- furiously bold excuses boost furiously carefully slow accounts. boldly final accounts grow. regular excuse|
+195|6|9985|20.39|efully among the fluffily even accounts! requests are slyly ag|
+195|7|2947|271.39|yly regular requests cajole carefully. carefully fina|
+195|8|319|102.58|ts. ironic foxes wake carefully slyly special pinto beans. blithely silent excuses hinder blithely quietly regular accounts. quickly careful foxes maintain slyly above the slyly express fo|
+195|9|2803|992.27|xes according to the regular foxes wake furiously final theodolites. furiously regular packages sleep slyly express theodolites. slyly thin instructions sleep r|
+196|7|3843|859.90|l platelets use blithely alongside of the enticingly final deposits. fluffily final requests boost furiously ag|
+196|8|2515|966.01|final theodolites. fluffily even deposits are against the|
+196|9|4778|37.61|fully final requests cajole fluffily across the furiously ironic accounts. qui|
+196|10|1068|928.25| cajole about the blithely regular ideas. final ideas hin|
+197|8|9678|753.88|ously. slyly stealthy requests use alongside of the express, unusual packages. final deposits wake. carefully unusual theodolites cajole slyly about the regular foxes. slyly iron|
+197|9|2631|279.05|e blithely. quickly final deposits wake fluffily excuses. even, unusual deposits x-ray among the final accounts. even ideas above the blithely ironic requests sleep furiously slyly final inst|
+197|10|7598|845.51|lets according to the regular deposits wake furiously about the carefully daring theodolites. blithely express dolphins poach after th|
+197|1|8950|897.33|ideas. requests wake above the blithely unusual deposits. slyly regular |
+198|9|6878|587.41|y even accounts poach carefully about the asymptotes. deposits haggle slyly. finally unusual requests run silently regular, bold packages: instructions after the |
+198|10|6493|673.99|y express excuses use blithely among the pending accounts. stealthy ide|
+198|1|8410|166.93|kages. blithely final theodolites dazzle fluffily. accounts boost furiously. furiously unu|
+198|2|6190|697.10|beans nag fluffily about the asymptotes. slyly bold escapades haggle quickly. fluffily special requests haggle above the ironic,|
+199|10|9343|79.70|ending accounts nag across the instructions. carefully express packages over the blithely even pac|
+199|1|8199|46.52|oost slyly. ironic platelets sleep blithely about the slyly silent foxes. furiously even pl|
+199|2|2742|890.63| the special deposits? carefully final deposits about the carefully regular sauternes |
+199|3|7167|884.56|onic platelets use carefully along the slowly stealthy ideas. slyly dogged instructions are quickly above the slyly u|
+200|1|3120|776.41|ntly final packages kindle furiously blithely ironic accounts. carefully final packages according to the carefully |
+200|2|5392|242.52|y unusual ideas. ruthlessly express asymptotes cajole. regular theodolites are. carefully silent deposits poach carefully across the fluffily even theodolites. carefully express realms hag|
+200|3|9408|307.79|oxes! fluffily regular requests use against the unusual, slow ideas. ironic accounts doze b|
+200|4|331|466.07| slyly even requests. fluffily final packages boost carefully express instructions. slyly regular forges are blithely unusual, regular |
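The rows above use the standard TPC-H PARTSUPP layout, ps_partkey|ps_suppkey|ps_availqty|ps_supplycost|ps_comment, with '|' both separating fields and terminating each record. A minimal Java sketch of a reader for this layout, for illustration only (the class name and file path are hypothetical, not part of this patch):

    import java.io.BufferedReader;
    import java.io.FileReader;
    import java.io.IOException;

    // Hypothetical reader for a TPC-H partsupp.tbl file; illustrative only.
    public class PartSuppReader {
        public static void main(String[] args) throws IOException {
            BufferedReader reader = new BufferedReader(new FileReader("partsupp.tbl"));
            String line;
            while ((line = reader.readLine()) != null) {
                // '|' separates the five fields; the trailing '|' produces no
                // extra token because split() discards trailing empty strings.
                String[] f = line.split("\\|");
                int partKey = Integer.parseInt(f[0]);
                int suppKey = Integer.parseInt(f[1]);
                int availQty = Integer.parseInt(f[2]);
                double supplyCost = Double.parseDouble(f[3]);
                String comment = f[4];
                System.out.println(partKey + "/" + suppKey + " qty=" + availQty
                        + " cost=" + supplyCost);
            }
            reader.close();
        }
    }

Because String.split drops trailing empty strings by default, the terminating '|' of each record needs no special handling.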
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/region.tbl b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/region.tbl
new file mode 100644
index 0000000..c5ebb63
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/region.tbl
@@ -0,0 +1,5 @@
+0|AFRICA|lar deposits. blithely final packages cajole. regular waters are final requests. regular accounts are according to |
+1|AMERICA|hs use ironic, even requests. s|
+2|ASIA|ges. thinly even pinto beans ca|
+3|EUROPE|ly final courts cajole furiously final excuse|
+4|MIDDLE EAST|uickly special accounts cajole carefully blithely close requests. carefully final asymptotes haggle furiousl|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/supplier.tbl b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/supplier.tbl
new file mode 100644
index 0000000..d9c0e9f
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/data/supplier.tbl
@@ -0,0 +1,10 @@
+1|Supplier#000000001| N kD4on9OM Ipw3,gf0JBoQDd7tgrzrddZ|17|27-918-335-1736|5755.94|each slyly above the careful|
+2|Supplier#000000002|89eJ5ksX3ImxJQBvxObC,|5|15-679-861-2259|4032.68| slyly bold instructions. idle dependen|
+3|Supplier#000000003|q1,G3Pj6OjIuUYfUoH18BFTKP5aU9bEV3|1|11-383-516-1199|4192.40|blithely silent requests after the express dependencies are sl|
+4|Supplier#000000004|Bk7ah4CK8SYQTepEmvMkkgMwg|15|25-843-787-7479|4641.08|riously even requests above the exp|
+5|Supplier#000000005|Gcdm2rJRzl5qlTVzc|11|21-151-690-3663|-283.84|. slyly regular pinto bea|
+6|Supplier#000000006|tQxuVm7s7CnK|14|24-696-997-4969|1365.79|final accounts. regular dolphins use against the furiously ironic decoys. |
+7|Supplier#000000007|s,4TicNGB4uO6PaSqNBUq|23|33-990-965-2201|6820.35|s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit|
+8|Supplier#000000008|9Sq4bBH2FQEmaFOocY45sRTxo6yuoG|17|27-498-742-3860|7627.85|al pinto beans. asymptotes haggl|
+9|Supplier#000000009|1KhUgZegwM3ua7dsYmekYBsK|10|20-403-398-8662|5302.37|s. unusual, even requests along the furiously regular pac|
+10|Supplier#000000010|Saygah3gYWMp72i PY|24|34-852-489-8585|3891.91|ing waters. regular requests ar|
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/core-site.xml b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/core-site.xml
new file mode 100644
index 0000000..47dfac5
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/core-site.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+    <name>fs.default.name</name>
+    <value>hdfs://127.0.0.1:31888</value>
+</property>
+<property>
+    <name>hadoop.tmp.dir</name>
+    <value>/tmp/hadoop</value>
+</property>
+
+
+</configuration>
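This core-site.xml is a stock Hadoop site file, so test code can read it with the standard org.apache.hadoop.conf.Configuration API. A minimal sketch, assuming the file is loaded from a local path (the path string and class name below are illustrative, not part of this patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    // Illustrative loader for the test core-site.xml; not part of this patch.
    public class CoreSiteCheck {
        public static void main(String[] args) {
            // false: skip the built-in defaults so only this file's values show up.
            Configuration conf = new Configuration(false);
            conf.addResource(new Path("hadoop/conf/core-site.xml"));
            // Expected per the file above: hdfs://127.0.0.1:31888 and /tmp/hadoop.
            System.out.println(conf.get("fs.default.name"));
            System.out.println(conf.get("hadoop.tmp.dir"));
        }
    }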
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/core-site.xml.bak b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/core-site.xml.bak
new file mode 100644
index 0000000..2e248d4
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/core-site.xml.bak
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+    <name>fs.default.name</name>
+    <value>hdfs://localhost:31888</value>
+</property>
+<property>
+    <name>hadoop.tmp.dir</name>
+    <value>/tmp/hadoop</value>
+</property>
+
+
+</configuration>
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/hdfs-site.xml b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/hdfs-site.xml
new file mode 100644
index 0000000..842e7ab
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/hdfs-site.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+   <name>dfs.replication</name>
+   <value>2</value>
+</property>
+
+<property>
+	<name>dfs.block.size</name>
+	<value>65536</value>
+</property>
+
+</configuration>
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/hdfs-site.xml.bak b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/hdfs-site.xml.bak
new file mode 100644
index 0000000..e3c082b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/hdfs-site.xml.bak
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+   <name>dfs.replication</name>
+   <value>2</value>
+</property>
+
+<property>
+	<name>dfs.block.size</name>
+	<value>32768</value>
+</property>
+
+</configuration>
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/mapred-site.xml b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/mapred-site.xml
new file mode 100644
index 0000000..1b9a4d6
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/mapred-site.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+  <property>
+    <name>mapred.job.tracker</name>
+    <value>localhost:29007</value>
+  </property>
+  <property>
+     <name>mapred.tasktracker.map.tasks.maximum</name>
+     <value>20</value>
+  </property>
+   <property>
+      <name>mapred.tasktracker.reduce.tasks.maximum</name>
+      <value>20</value>
+   </property>
+   <property>
+      <name>mapred.min.split.size</name>
+      <value>65536</value>
+   </property>
+
+</configuration>
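The mapred settings follow the same pattern and can be read through the old-API JobConf; a sketch under the same illustrative path assumption (class name hypothetical):

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.JobConf;

    // Illustrative loader for the test mapred-site.xml; not part of this patch.
    public class MapredSiteCheck {
        public static void main(String[] args) {
            JobConf job = new JobConf(false);
            job.addResource(new Path("hadoop/conf/mapred-site.xml"));
            // Expected per the file above: localhost:29007, 20 map and 20 reduce
            // slots, and a 64 KB minimum split size.
            System.out.println(job.get("mapred.job.tracker"));
            System.out.println(job.getInt("mapred.tasktracker.map.tasks.maximum", 2));
            System.out.println(job.getLong("mapred.min.split.size", 0L));
        }
    }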
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/mapred-site.xml.bak b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/mapred-site.xml.bak
new file mode 100644
index 0000000..7a51b86
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hadoop/conf/mapred-site.xml.bak
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+  <property>
+    <name>mapred.job.tracker</name>
+    <value>localhost:29007</value>
+  </property>
+  <property>
+     <name>mapred.tasktracker.map.tasks.maximum</name>
+     <value>20</value>
+  </property>
+   <property>
+      <name>mapred.tasktracker.reduce.tasks.maximum</name>
+      <value>20</value>
+   </property>
+   <property>
+      <name>mapred.min.split.size</name>
+      <value>32768</value>
+   </property>
+
+</configuration>
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hive/conf/hive-default.xml b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hive/conf/hive-default.xml
new file mode 100644
index 0000000..eef4071
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hive/conf/hive-default.xml
@@ -0,0 +1,853 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+	<!-- Hive Configuration can either be stored in this file or in the hadoop 
+		configuration files -->
+	<!-- that are implied by Hadoop setup variables. -->
+	<!-- Aside from Hadoop setup variables - this file is provided as a convenience 
+		so that Hive -->
+	<!-- users do not have to edit hadoop configuration files (that may be managed 
+		as a centralized -->
+	<!-- resource). -->
+
+	<!-- Hive Execution Parameters -->
+	<property>
+		<name>mapred.reduce.tasks</name>
+		<value>-1</value>
+		<description>The default number of reduce tasks per job. Typically set
+			to a prime close to the number of available hosts. Ignored when
+			mapred.job.tracker is "local". Hadoop sets this to 1 by default,
+			whereas Hive uses -1 as its default value. By setting this property
+			to -1, Hive will automatically figure out the number of reducers.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hyracks.connectorpolicy</name>
+		<value>SEND_SIDE_MAT_PIPELINING</value>
+	</property>
+
+	<property>
+		<name>hive.hyracks.host</name>
+		<value>127.0.0.1</value>
+	</property>
+
+	<property>
+		<name>hive.hyracks.port</name>
+		<value>13099</value>
+	</property>
+
+	<property>
+		<name>hive.hyracks.app</name>
+		<value>hivesterix</value>
+	</property>
+
+
+	<property>
+		<name>hive.hyracks.parrallelism</name>
+		<value>2</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.groupby.external</name>
+		<value>false</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.groupby.external.memory</name>
+		<value>3072</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.sort.memory</name>
+		<value>3072</value>
+	</property>
+
+	<property>
+		<name>hive.algebricks.framesize</name>
+		<value>768</value>
+	</property>
+
+	<property>
+		<name>hive.exec.reducers.bytes.per.reducer</name>
+		<value>1000000000</value>
+		<description>Size per reducer. The default is 1G, i.e. if the input
+			size is 10G, it will use 10 reducers.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.reducers.max</name>
+		<value>999</value>
+		<description>The maximum number of reducers that will be used. If the
+			value of the configuration parameter mapred.reduce.tasks is
+			negative, Hive uses this as the maximum number of reducers when
+			automatically determining the number of reducers.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.scratchdir</name>
+		<value>/tmp/hive-${user.name}</value>
+		<description>Scratch space for Hive jobs</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode</name>
+		<value>false</value>
+		<description>Whether Hive is running in test mode. If so, it turns on
+			sampling and prefixes the output table name.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode.prefix</name>
+		<value>test_</value>
+		<description>If Hive is running in test mode, the output table name is
+			prefixed with this string.
+		</description>
+	</property>
+
+	<!-- If the input table is not bucketed, the denominator of the tablesample 
+		is determined by the parameter below -->
+	<!-- For example, the following query: -->
+	<!-- INSERT OVERWRITE TABLE dest -->
+	<!-- SELECT col1 from src -->
+	<!-- would be converted to -->
+	<!-- INSERT OVERWRITE TABLE test_dest -->
+	<!-- SELECT col1 from src TABLESAMPLE (BUCKET 1 out of 32 on rand(1)) -->
+	<property>
+		<name>hive.test.mode.samplefreq</name>
+		<value>32</value>
+		<description>If Hive is running in test mode and the table is not
+			bucketed, the sampling frequency.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.test.mode.nosamplelist</name>
+		<value></value>
+		<description>If Hive is running in test mode, don't sample the above
+			comma-separated list of tables.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.local</name>
+		<value>true</value>
+		<description>Controls whether to connect to a remote metastore server
+			or open a new metastore server in the Hive client JVM.
+		</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionURL</name>
+		<value>jdbc:derby:;databaseName=metastore_db;create=true</value>
+		<description>JDBC connect string for a JDBC metastore</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionDriverName</name>
+		<value>org.apache.derby.jdbc.EmbeddedDriver</value>
+		<description>Driver class name for a JDBC metastore</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.PersistenceManagerFactoryClass</name>
+		<value>org.datanucleus.jdo.JDOPersistenceManagerFactory</value>
+		<description>Class implementing the JDO persistence layer.</description>
+	</property>
+
+	<property>
+		<name>datanucleus.connectionPoolingType</name>
+		<value>DBCP</value>
+		<description>Uses a DBCP connection pool for JDBC metastore
+		</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.DetachAllOnCommit</name>
+		<value>true</value>
+		<description>detaches all objects from session so that they can be
+			used after transaction is committed
+		</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.NonTransactionalRead</name>
+		<value>true</value>
+		<description>reads outside of transactions</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionUserName</name>
+		<value>APP</value>
+		<description>username to use against metastore database</description>
+	</property>
+
+	<property>
+		<name>javax.jdo.option.ConnectionPassword</name>
+		<value>mine</value>
+		<description>password to use against metastore database</description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateTables</name>
+		<value>false</value>
+		<description>Validates the existing schema against the code. Turn this
+			on if you want to verify the existing schema.
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateColumns</name>
+		<value>false</value>
+		<description>Validates the existing schema against the code. Turn this
+			on if you want to verify the existing schema.
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.validateConstraints</name>
+		<value>false</value>
+		<description>Validates the existing schema against the code. Turn this
+			on if you want to verify the existing schema.
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.storeManagerType</name>
+		<value>rdbms</value>
+		<description>metadata store type</description>
+	</property>
+
+	<property>
+		<name>datanucleus.autoCreateSchema</name>
+		<value>true</value>
+		<description>Creates the necessary schema on startup if one doesn't
+			exist. Set this to false after creating it once.
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.autoStartMechanismMode</name>
+		<value>checked</value>
+		<description>throw exception if metadata tables are incorrect
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.transactionIsolation</name>
+		<value>read-committed</value>
+		<description>Default transaction isolation level for identity
+			generation.
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.cache.level2</name>
+		<value>false</value>
+		<description>Use a level 2 cache. Turn this off if metadata is changed
+			independently of hive metastore server
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.cache.level2.type</name>
+		<value>SOFT</value>
+		<description>SOFT=soft reference based cache, WEAK=weak reference
+			based cache.
+		</description>
+	</property>
+
+	<property>
+		<name>datanucleus.identifierFactory</name>
+		<value>datanucleus</value>
+		<description>Name of the identifier factory to use when generating
+			table/column names etc. 'datanucleus' is used for backward
+			compatibility
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.warehouse.dir</name>
+		<value>/tmp/hivesterix</value>
+		<description>location of default database for the warehouse
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.connect.retries</name>
+		<value>5</value>
+		<description>Number of retries while opening a connection to metastore
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.rawstore.impl</name>
+		<value>org.apache.hadoop.hive.metastore.ObjectStore</value>
+		<description>Name of the class that implements the
+			org.apache.hadoop.hive.metastore.rawstore interface. This class is
+			used for storage and retrieval of raw metadata objects such as
+			tables and databases.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.default.fileformat</name>
+		<value>TextFile</value>
+		<description>Default file format for CREATE TABLE statement. Options
+			are TextFile and SequenceFile. Users can explicitly say CREATE TABLE
+			... STORED AS &lt;TEXTFILE|SEQUENCEFILE&gt; to override</description>
+	</property>
+
+	<property>
+		<name>hive.fileformat.check</name>
+		<value>true</value>
+		<description>Whether to check file format or not when loading data
+			files
+		</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr</name>
+		<value>true</value>
+		<description>Whether to use map-side aggregation in Hive Group By
+			queries
+		</description>
+	</property>
+
+	<property>
+		<name>hive.groupby.skewindata</name>
+		<value>false</value>
+		<description>Whether there is skew in data to optimize group by
+			queries
+		</description>
+	</property>
+
+	<property>
+		<name>hive.groupby.mapaggr.checkinterval</name>
+		<value>100000</value>
+		<description>Number of rows after which the size check of the grouping
+			keys/aggregation classes is performed.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.local.mem</name>
+		<value>0</value>
+		<description>For local mode, memory of the mappers/reducers
+		</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr.hash.percentmemory</name>
+		<value>0.5</value>
+		<description>Portion of total memory to be used by the map-side group
+			aggregation hash table.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.map.aggr.hash.min.reduction</name>
+		<value>0.5</value>
+		<description>Hash aggregation will be turned off if the ratio between
+			hash table size and input rows is bigger than this number. Set to 1
+			to make sure hash aggregation is never turned off.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.cp</name>
+		<value>true</value>
+		<description>Whether to enable column pruner</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.ppd</name>
+		<value>true</value>
+		<description>Whether to enable predicate pushdown</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.pruner</name>
+		<value>true</value>
+		<description>Whether to enable the new partition pruner, which depends
+			on predicate pushdown. If this is disabled, the old partition
+			pruner, which is based on the AST, will be enabled.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.groupby</name>
+		<value>true</value>
+		<description>Whether to enable the bucketed group by from bucketed
+			partitions/tables.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.join.emit.interval</name>
+		<value>1000</value>
+		<description>How many rows in the right-most join operand Hive should
+			buffer before emitting the join result.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.join.cache.size</name>
+		<value>25000</value>
+		<description>How many rows in the joining tables (except the streaming
+			table) should be cached in memory.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.bucket.cache.size</name>
+		<value>100</value>
+		<description>How many values in each key in the map-joined table
+			should be cached in memory.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.maxsize</name>
+		<value>100000</value>
+		<description>Maximum # of rows of the small table that can be handled
+			by map-side join. If the size is reached and hive.task.progress is
+			set, a fatal error counter is set and the job will be killed.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapjoin.cache.numrows</name>
+		<value>25000</value>
+		<description>How many rows should be cached by jdbm for map join.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.skewjoin</name>
+		<value>false</value>
+		<description>Whether to enable skew join optimization. </description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.key</name>
+		<value>100000</value>
+		<description>Determines whether a join key is skewed. If more than the
+			specified number of rows with the same key are seen in the join
+			operator, the key is treated as a skew join key.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.mapjoin.map.tasks</name>
+		<value>10000</value>
+		<description>Determines the number of map tasks used in the follow-up
+			map join job for a skew join. It should be used together with
+			hive.skewjoin.mapjoin.min.split for fine-grained control.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.skewjoin.mapjoin.min.split</name>
+		<value>33554432</value>
+		<description>Determines the maximum number of map tasks used in the
+			follow-up map join job for a skew join by specifying the minimum
+			split size. It should be used together with
+			hive.skewjoin.mapjoin.map.tasks for fine-grained control.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.mode</name>
+		<value>nonstrict</value>
+		<description>The mode in which the hive operations are being
+			performed. In strict mode, some risky queries are not allowed to run
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.script.maxerrsize</name>
+		<value>100000</value>
+		<description>Maximum number of bytes a script is allowed to emit to
+			standard error (per map-reduce task). This prevents runaway scripts
+			from filling log partitions to capacity.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.script.allow.partial.consumption</name>
+		<value>false</value>
+		<description> When enabled, this option allows a user script to exit
+			successfully without consuming all the data from the standard input.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.script.operator.id.env.var</name>
+		<value>HIVE_SCRIPT_OPERATOR_ID</value>
+		<description> Name of the environment variable that holds the unique
+			script operator ID in the user's transform function (the custom
+			mapper/reducer that the user has specified in the query)
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.compress.output</name>
+		<value>false</value>
+		<description>This controls whether the final outputs of a query (to a
+			local/HDFS file or a Hive table) are compressed. The compression codec
+			and other options are determined from hadoop config variables
+			mapred.output.compress*
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.compress.intermediate</name>
+		<value>false</value>
+		<description> This controls whether intermediate files produced by
+			Hive between multiple map-reduce jobs are compressed. The compression
+			codec and other options are determined from hadoop config variables
+			mapred.output.compress*
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.parallel</name>
+		<value>false</value>
+		<description>Whether to execute jobs in parallel</description>
+	</property>
+
+	<property>
+		<name>hive.exec.parallel.thread.number</name>
+		<value>8</value>
+		<description>How many jobs at most can be executed in parallel
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.war.file</name>
+		<value>lib\hive-hwi-0.7.0.war</value>
+		<description>This sets the path to the HWI war file, relative to
+			${HIVE_HOME}.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.listen.host</name>
+		<value>0.0.0.0</value>
+		<description>This is the host address the Hive Web Interface will
+			listen on
+		</description>
+	</property>
+
+	<property>
+		<name>hive.hwi.listen.port</name>
+		<value>9999</value>
+		<description>This is the port the Hive Web Interface will listen on
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.pre.hooks</name>
+		<value></value>
+		<description>Pre Execute Hook for Tests</description>
+	</property>
+
+	<property>
+		<name>hive.merge.mapfiles</name>
+		<value>true</value>
+		<description>Merge small files at the end of a map-only job
+		</description>
+	</property>
+
+	<property>
+		<name>hive.merge.mapredfiles</name>
+		<value>false</value>
+		<description>Merge small files at the end of a map-reduce job
+		</description>
+	</property>
+
+	<property>
+		<name>hive.heartbeat.interval</name>
+		<value>1000</value>
+		<description>Send a heartbeat after this interval - used by mapjoin
+			and filter operators
+		</description>
+	</property>
+
+	<property>
+		<name>hive.merge.size.per.task</name>
+		<value>256000000</value>
+		<description>Size of merged files at the end of the job</description>
+	</property>
+
+	<property>
+		<name>hive.merge.size.smallfiles.avgsize</name>
+		<value>16000000</value>
+		<description>When the average output file size of a job is less than
+			this number, Hive will start an additional map-reduce job to merge
+			the output files into bigger files. This is only done for map-only
+			jobs if hive.merge.mapfiles is true, and for map-reduce jobs if
+			hive.merge.mapredfiles is true.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.script.auto.progress</name>
+		<value>false</value>
+		<description>Whether a Hive Transform/Map/Reduce clause should
+			automatically send progress information to the TaskTracker to avoid
+			the task getting killed because of inactivity. Hive sends progress
+			information when the script is outputting to stderr. This option
+			removes the need to periodically produce stderr messages, but users
+			should be cautious because this may prevent the TaskTracker from
+			killing scripts stuck in infinite loops.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.script.serde</name>
+		<value>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</value>
+		<description>The default SerDe for transmitting input data to and
+			reading output data from the user scripts.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.script.recordreader</name>
+		<value>org.apache.hadoop.hive.ql.exec.TextRecordReader</value>
+		<description>The default record reader for reading data from the user
+			scripts.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.script.recordwriter</name>
+		<value>org.apache.hadoop.hive.ql.exec.TextRecordWriter</value>
+		<description>The default record writer for writing data to the user
+			scripts.
+		</description>
+	</property>
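+
+	<!-- A minimal sketch (table and script names are hypothetical) of a
+		script transform that exercises the SerDe, record reader and record
+		writer defaults above.
+		ADD FILE my_parser.py;
+		SELECT TRANSFORM (line)
+		USING 'python my_parser.py'
+		AS (key STRING, value STRING)
+		FROM src_table;
+	-->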
+
+	<property>
+		<name>hive.input.format</name>
+		<value>org.apache.hadoop.hive.ql.io.HiveInputFormat</value>
+		<description>The default input format; if it is not specified, the
+			system assigns it. It is set to HiveInputFormat for Hadoop versions
+			17, 18 and 19, whereas it is set to CombineHiveInputFormat for
+			Hadoop 20. The user can always override it - if there is a bug in
+			CombineHiveInputFormat, it can always be manually set to
+			HiveInputFormat.
+		</description>
+	</property>
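+
+	<!-- Example only: forcing the plain input format back for one session.
+		SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+	-->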
+
+	<property>
+		<name>hive.udtf.auto.progress</name>
+		<value>false</value>
+		<description>Whether Hive should automatically send progress
+			information to the TaskTracker when using UDTFs, to prevent the
+			task from getting killed because of inactivity. Users should be
+			cautious because this may prevent the TaskTracker from killing
+			tasks with infinite loops.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.mapred.reduce.tasks.speculative.execution</name>
+		<value>true</value>
+		<description>Whether speculative execution for reducers should be
+			turned on.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.counters.pull.interval</name>
+		<value>1000</value>
+		<description>The interval with which to poll the JobTracker for the
+			counters of the running job. The smaller it is, the more load there
+			will be on the JobTracker; the higher it is, the less granular the
+			counter updates will be.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.enforce.bucketing</name>
+		<value>false</value>
+		<description>Whether bucketing is enforced. If true, while inserting
+			into the table, bucketing is enforced.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.enforce.sorting</name>
+		<value>false</value>
+		<description>Whether sorting is enforced. If true, while inserting
+			into the table, sorting is enforced.
+		</description>
+	</property>
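+
+	<!-- Sketch only, with a hypothetical table: when the two enforce flags
+		above are true, inserts into a bucketed, sorted table are clustered
+		and sorted automatically.
+		SET hive.enforce.bucketing=true;
+		SET hive.enforce.sorting=true;
+		CREATE TABLE user_events (user_id INT, ts STRING)
+		CLUSTERED BY (user_id) SORTED BY (ts) INTO 32 BUCKETS;
+	-->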
+
+	<property>
+		<name>hive.metastore.ds.connection.url.hook</name>
+		<value></value>
+		<description>Name of the hook to use for retrieving the JDO connection
+			URL. If empty, the value in javax.jdo.option.ConnectionURL is used.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.retry.attempts</name>
+		<value>1</value>
+		<description>The number of times to retry a metastore call if there
+			is a connection error
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.ds.retry.interval</name>
+		<value>1000</value>
+		<description>The number of milliseconds between metastore retry
+			attempts
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.min.threads</name>
+		<value>200</value>
+		<description>Minimum number of worker threads in the Thrift server's
+			pool.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.max.threads</name>
+		<value>100000</value>
+		<description>Maximum number of worker threads in the Thrift server's
+			pool.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.metastore.server.tcp.keepalive</name>
+		<value>true</value>
+		<description>Whether to enable TCP keepalive for the metastore server.
+			Keepalive will prevent accumulation of half-open connections.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.optimize.reducededuplication</name>
+		<value>true</value>
+		<description>Remove extra map-reduce jobs if the data is already
+			clustered by the same key which needs to be used again. This should
+			always be set to true. Since it is a new feature, it has been made
+			configurable.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.dynamic.partition</name>
+		<value>false</value>
+		<description>Whether or not to allow dynamic partitions in DML/DDL.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.dynamic.partition.mode</name>
+		<value>strict</value>
+		<description>In strict mode, the user must specify at least one static
+			partition in case the user accidentally overwrites all partitions.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.max.dynamic.partitions</name>
+		<value>1000</value>
+		<description>Maximum number of dynamic partitions allowed to be
+			created in total.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.max.dynamic.partitions.pernode</name>
+		<value>100</value>
+		<description>Maximum number of dynamic partitions allowed to be
+			created in each mapper/reducer node.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.default.partition.name</name>
+		<value>__HIVE_DEFAULT_PARTITION__</value>
+		<description>The default partition name in case the dynamic partition
+			column value is null/empty string or any other value that cannot be
+			escaped. This value must not contain any special character used in
+			HDFS URIs (e.g., ':', '%', '/' etc.). The user has to be aware that
+			the dynamic partition value should not contain this value, to avoid
+			confusion.
+		</description>
+	</property>
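+
+	<!-- A hedged sketch of the dynamic-partition knobs above; the table
+		names are hypothetical. Nonstrict mode is needed when no static
+		partition is specified.
+		SET hive.exec.dynamic.partition=true;
+		SET hive.exec.dynamic.partition.mode=nonstrict;
+		INSERT OVERWRITE TABLE sales PARTITION (dt)
+		SELECT s.amount, s.dt FROM staging s;
+	-->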
+
+	<property>
+		<name>fs.har.impl</name>
+		<value>org.apache.hadoop.hive.shims.HiveHarFileSystem</value>
+		<description>The implementation for accessing Hadoop Archives. Note
+			that this won't be applicable to Hadoop versions less than 0.20
+		</description>
+	</property>
+
+	<property>
+		<name>hive.archive.enabled</name>
+		<value>false</value>
+		<description>Whether archiving operations are permitted</description>
+	</property>
+
+	<property>
+		<name>hive.archive.har.parentdir.settable</name>
+		<value>false</value>
+		<description>In new Hadoop versions, the parent directory must be set
+			while creating a HAR. Because this functionality is hard to detect
+			with just version numbers, this conf var needs to be set manually.
+		</description>
+	</property>
+
+	<!-- HBase Storage Handler Parameters -->
+
+	<property>
+		<name>hive.hbase.wal.enabled</name>
+		<value>true</value>
+		<description>Whether writes to HBase should be forced to the
+			write-ahead log. Disabling this improves HBase write performance at
+			the risk of lost writes in case of a crash.
+		</description>
+	</property>
+
+	<property>
+		<name>hive.exec.drop.ignorenonexistent</name>
+		<value>true</value>
+		<description>If true, DROP TABLE always succeeds, even if the table
+			does not exist.</description>
+	</property>
+
+</configuration>
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hive/conf/topology.xml b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hive/conf/topology.xml
new file mode 100644
index 0000000..4710706
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hive/conf/topology.xml
@@ -0,0 +1,7 @@
+<cluster-topology>
+    <network-switch name="Global">
+        <network-switch name="local">
+            <terminal name="127.0.0.1"/>
+        </network-switch>
+    </network-switch>
+</cluster-topology>
\ No newline at end of file
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/ignore.txt b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/ignore.txt
new file mode 100644
index 0000000..e70ea78
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/ignore.txt
@@ -0,0 +1 @@
+q16
\ No newline at end of file
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/logging.properties b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/logging.properties
new file mode 100644
index 0000000..1cc34e1
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/logging.properties
@@ -0,0 +1,65 @@
+############################################################
+#  	Default Logging Configuration File
+#
+# You can use a different file by specifying a filename
+# with the java.util.logging.config.file system property.  
+# For example java -Djava.util.logging.config.file=myfile
+############################################################
+
+############################################################
+#  	Global properties
+############################################################
+
+# "handlers" specifies a comma separated list of log Handler 
+# classes.  These handlers will be installed during VM startup.
+# Note that these classes must be on the system classpath.
+# By default we only configure a ConsoleHandler, which will only
+# show messages at the INFO and above levels.
+
+handlers= java.util.logging.ConsoleHandler
+
+# To also add the FileHandler, use the following line instead.
+
+# handlers= java.util.logging.FileHandler, java.util.logging.ConsoleHandler
+
+# Default global logging level.
+# This specifies which kinds of events are logged across
+# all loggers.  For any given facility this global level
+# can be overridden by a facility specific level.
+# Note that the ConsoleHandler also has a separate level
+# setting to limit messages printed to the console.
+
+.level= WARNING
+# .level= INFO
+# .level= FINE
+# .level = FINEST
+
+############################################################
+# Handler specific properties.
+# Describes specific configuration info for Handlers.
+############################################################
+
+# default file output is in user's home directory.
+
+# java.util.logging.FileHandler.pattern = %h/java%u.log
+# java.util.logging.FileHandler.limit = 50000
+# java.util.logging.FileHandler.count = 1
+# java.util.logging.FileHandler.formatter = java.util.logging.XMLFormatter
+
+# Limit the messages that are printed on the console to FINE and above.
+
+java.util.logging.ConsoleHandler.level = FINE
+java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter
+
+
+############################################################
+# Facility specific properties.
+# Provides extra control for each logger.
+############################################################
+
+# For example, set the com.xyz.foo logger to only log SEVERE
+# messages:
+
+edu.uci.ics.asterix.level = WARNING
+edu.uci.ics.algebricks.level = WARNING
+edu.uci.ics.hyracks.level = WARNING
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q10_returned_item.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q10_returned_item.hive
new file mode 100644
index 0000000..3f1214a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q10_returned_item.hive
@@ -0,0 +1,37 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q10_returned_item;
+
+-- create the tables and load the data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the result table
+create table q10_returned_item (c_custkey int, c_name string, revenue double, c_acctbal string, n_name string, c_address string, c_phone string, c_comment string);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+insert overwrite table q10_returned_item
+select 
+  c_custkey, c_name, sum(l_extendedprice * (1 - l_discount)) as revenue, 
+  c_acctbal, n_name, c_address, c_phone, c_comment
+from
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey and o.o_orderdate >= '1993-10-01' and o.o_orderdate < '1994-01-01'
+  join nation n 
+  on 
+    c.c_nationkey = n.n_nationkey
+  join lineitem l 
+  on 
+    l.l_orderkey = o.o_orderkey and l.l_returnflag = 'R'
+group by c_custkey, c_name, c_acctbal, c_phone, n_name, c_address, c_comment 
+order by revenue desc 
+limit 20;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q11_important_stock.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q11_important_stock.hive
new file mode 100644
index 0000000..de0cfc3
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q11_important_stock.hive
@@ -0,0 +1,47 @@
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q11_important_stock;
+DROP TABLE IF EXISTS q11_part_tmp;
+DROP TABLE IF EXISTS q11_sum_tmp;
+
+-- create tables and load data
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/partsupp';
+
+-- create the target tables
+create table q11_important_stock(ps_partkey INT, value DOUBLE);
+create table q11_part_tmp(ps_partkey int, part_value double);
+create table q11_sum_tmp(total_value double);
+
+-- the query
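+-- q11_part_tmp holds the stock value per part and q11_sum_tmp the grand
+-- total; the final cross join keeps parts above a fixed fraction (0.00001) of it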
+insert overwrite table q11_part_tmp
+select 
+  ps_partkey, sum(ps_supplycost * ps_availqty) as part_value 
+from
+  nation n join supplier s 
+  on 
+    s.s_nationkey = n.n_nationkey
+  join partsupp ps 
+  on 
+    ps.ps_suppkey = s.s_suppkey
+group by ps_partkey;
+
+insert overwrite table q11_sum_tmp
+select 
+  sum(part_value) as total_value
+from 
+  q11_part_tmp;
+
+insert overwrite table q11_important_stock
+select 
+  ps_partkey, part_value as value
+from
+  (
+    select ps_partkey, part_value, total_value
+    from q11_part_tmp join q11_sum_tmp
+  ) a
+where part_value > total_value * 0.00001
+order by value desc;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q12_shipping.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q12_shipping.hive
new file mode 100644
index 0000000..062f7b9
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q12_shipping.hive
@@ -0,0 +1,42 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q12_shipping;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+
+-- create the result table
+create table q12_shipping(l_shipmode string, high_line_count double, low_line_count double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
+-- the query
+insert overwrite table q12_shipping
+select 
+  l_shipmode,
+  sum(case
+    when o_orderpriority = '1-URGENT'
+         or o_orderpriority = '2-HIGH'
+    then 1
+    else 0
+    end
+  ) as high_line_count,
+  sum(case
+    when o_orderpriority <> '1-URGENT'
+         and o_orderpriority <> '2-HIGH'
+    then 1
+    else 0
+    end
+  ) as low_line_count
+from
+  orders o join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey and l.l_commitdate < l.l_receiptdate
+    and l.l_shipdate < l.l_commitdate and l.l_receiptdate >= '1994-01-01' 
+    and l.l_receiptdate < '1995-01-01'
+where 
+  l.l_shipmode = 'MAIL' or l.l_shipmode = 'SHIP'
+group by l_shipmode
+order by l_shipmode;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q13_customer_distribution.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q13_customer_distribution.hive
new file mode 100644
index 0000000..a799008
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q13_customer_distribution.hive
@@ -0,0 +1,27 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q13_customer_distribution;
+
+-- create the tables and load the data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+
+-- create the result table
+create table q13_customer_distribution (c_count int, custdist int);
+
+-- the query
+insert overwrite table q13_customer_distribution
+select 
+  c_count, count(1) as custdist
+from 
+  (select 
+     c_custkey, count(o_orderkey) as c_count
+   from 
+     customer c left outer join orders o 
+     on 
+       c.c_custkey = o.o_custkey and not o.o_comment like '%special%requests%'
+   group by c_custkey
+   ) c_orders
+group by c_count
+order by custdist desc, c_count desc;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q14_promotion_effect.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q14_promotion_effect.hive
new file mode 100644
index 0000000..988f400
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q14_promotion_effect.hive
@@ -0,0 +1,28 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q14_promotion_effect;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+
+-- create the result table
+create table q14_promotion_effect(promo_revenue double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
+insert overwrite table q14_promotion_effect
+select 
+  100.00 * sum(case
+               when p_type like 'PROMO%'
+               then l_extendedprice*(1-l_discount)
+               else 0.0
+               end
+  ) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue
+from 
+  part p join lineitem l 
+  on 
+    l.l_partkey = p.p_partkey and l.l_shipdate >= '1995-09-01' and l.l_shipdate < '1995-10-01';
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q15_top_supplier.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q15_top_supplier.hive
new file mode 100644
index 0000000..04064ed
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q15_top_supplier.hive
@@ -0,0 +1,45 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS revenue;
+DROP TABLE IF EXISTS max_revenue;
+DROP TABLE IF EXISTS q15_top_supplier;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+
+-- create result tables
+create table revenue(supplier_no int, total_revenue double); 
+create table max_revenue(max_revenue double); 
+create table q15_top_supplier(s_suppkey int, s_name string, s_address string, s_phone string, total_revenue double);
+
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table revenue
+select 
+  l_suppkey as supplier_no, sum(l_extendedprice * (1 - l_discount)) as total_revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1996-01-01' and l_shipdate < '1996-04-01'
+group by l_suppkey;
+
+insert overwrite table max_revenue
+select 
+  max(total_revenue)
+from 
+  revenue;
+
+insert overwrite table q15_top_supplier
+select 
+  s_suppkey, s_name, s_address, s_phone, total_revenue
+from supplier s join revenue r 
+  on 
+    s.s_suppkey = r.supplier_no
+  join max_revenue m 
+  on 
+    r.total_revenue = m.max_revenue
+order by s_suppkey;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q16_parts_supplier_relationship.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q16_parts_supplier_relationship.hive
new file mode 100644
index 0000000..971ef99
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q16_parts_supplier_relationship.hive
@@ -0,0 +1,53 @@
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS q16_parts_supplier_relationship;
+DROP TABLE IF EXISTS q16_tmp;
+DROP TABLE IF EXISTS supplier_tmp;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/partsupp';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+
+-- create the result tables
+create table q16_parts_supplier_relationship(p_brand string, p_type string, p_size int, supplier_cnt int);
+create table q16_tmp(p_brand string, p_type string, p_size int, ps_suppkey int);
+create table supplier_tmp(s_suppkey int);
+
+-- the query
+insert overwrite table supplier_tmp
+select 
+  s_suppkey
+from 
+  supplier
+where 
+  not s_comment like '%Customer%Complaints%';
+
+insert overwrite table q16_tmp
+select 
+  p_brand, p_type, p_size, ps_suppkey
+from 
+  partsupp ps join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_brand <> 'Brand#45' 
+    and not p.p_type like 'MEDIUM POLISHED%'
+  join supplier_tmp s 
+  on 
+    ps.ps_suppkey = s.s_suppkey;
+
+insert overwrite table q16_parts_supplier_relationship
+select 
+  p_brand, p_type, p_size, count(distinct ps_suppkey) as supplier_cnt
+from 
+  (select 
+     * 
+   from
+     q16_tmp 
+   where p_size = 49 or p_size = 14 or p_size = 23 or
+         p_size = 45 or p_size = 19 or p_size = 3 or
+         p_size = 36 or p_size = 9
+  ) q16_all
+group by p_brand, p_type, p_size
+order by supplier_cnt desc, p_brand, p_type, p_size;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q17_small_quantity_order_revenue.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q17_small_quantity_order_revenue.hive
new file mode 100644
index 0000000..db7746b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q17_small_quantity_order_revenue.hive
@@ -0,0 +1,37 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q17_small_quantity_order_revenue;
+DROP TABLE IF EXISTS lineitem_tmp;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+
+-- create the result tables
+create table q17_small_quantity_order_revenue (avg_yearly double);
+create table lineitem_tmp (t_partkey int, t_avg_quantity double);
+
+-- the query
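+-- lineitem_tmp materializes 0.2 * avg(l_quantity) per part; the final query
+-- sums prices of line items whose quantity falls below that per-part threshold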
+insert overwrite table lineitem_tmp
+select 
+  l_partkey as t_partkey, 0.2 * avg(l_quantity) as t_avg_quantity
+from 
+  lineitem
+group by l_partkey;
+
+insert overwrite table q17_small_quantity_order_revenue
+select
+  sum(l_extendedprice) / 7.0 as avg_yearly
+from
+  (select l_quantity, l_extendedprice, t_avg_quantity from
+   lineitem_tmp t join
+     (select
+        l_quantity, l_partkey, l_extendedprice
+      from
+        part p join lineitem l
+        on
+          p.p_partkey = l.l_partkey
+          and p.p_container = 'MED BOX'
+      ) l1 on l1.l_partkey = t.t_partkey
+   ) a
+where l_quantity < t_avg_quantity;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q18_large_volume_customer.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q18_large_volume_customer.hive
new file mode 100644
index 0000000..ac2902c
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q18_large_volume_customer.hive
@@ -0,0 +1,43 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS q18_tmp;
+DROP TABLE IF EXISTS q18_large_volume_customer;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+
+-- create the result tables
+create table q18_tmp(l_orderkey int, t_sum_quantity double);
+create table q18_large_volume_customer(c_name string, c_custkey int, o_orderkey int, o_orderdate string, o_totalprice double, sum_quantity double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1164000000;
+
+-- the query
+insert overwrite table q18_tmp
+select 
+  l_orderkey, sum(l_quantity) as t_sum_quantity
+from 
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q18_large_volume_customer
+select 
+  c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice,sum(l_quantity)
+from 
+  customer c join orders o 
+  on 
+    c.c_custkey = o.o_custkey
+  join q18_tmp t 
+  on 
+    o.o_orderkey = t.l_orderkey and t.t_sum_quantity > 30
+  join lineitem l 
+  on 
+    o.o_orderkey = l.l_orderkey
+group by c_name,c_custkey,o_orderkey,o_orderdate,o_totalprice
+order by o_totalprice desc,o_orderdate
+limit 100;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q19_discounted_revenue.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q19_discounted_revenue.hive
new file mode 100644
index 0000000..2002e1e
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q19_discounted_revenue.hive
@@ -0,0 +1,49 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q19_discounted_revenue;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+
+-- create the result table
+create table q19_discounted_revenue(revenue double);
+
+set mapred.min.split.size=268435456;
+set hive.exec.reducers.bytes.per.reducer=1040000000;
+
+-- the query
+insert overwrite table q19_discounted_revenue
+select
+  sum(l_extendedprice * (1 - l_discount) ) as revenue
+from
+  part p join lineitem l 
+  on 
+    p.p_partkey = l.l_partkey    
+where
+  (
+    p_brand = 'Brand#12'
+    and p_container REGEXP 'SM CASE|SM BOX|SM PACK|SM PKG'
+    and l_quantity >= 1 and l_quantity <= 11
+    and p_size >= 1 and p_size <= 5
+    and l_shipmode REGEXP 'AIR|AIR REG'
+    and l_shipinstruct = 'DELIVER IN PERSON'
+  ) 
+  or 
+  (
+    p_brand = 'Brand#23'
+    and p_container REGEXP 'MED BAG|MED BOX|MED PKG|MED PACK'
+    and l_quantity >= 10 and l_quantity <= 20
+    and p_size >= 1 and p_size <= 10
+    and l_shipmode REGEXP 'AIR|AIR REG'
+    and l_shipinstruct = 'DELIVER IN PERSON'
+  )
+  or
+  (
+    p_brand = 'Brand#34'
+    and p_container REGEXP 'LG CASE|LG BOX|LG PACK|LG PKG'
+    and l_quantity >= 20 and l_quantity <= 30
+    and p_size >= 1 and p_size <= 15
+    and l_shipmode REGEXP 'AIR|AIR REG'
+    and l_shipinstruct = 'DELIVER IN PERSON'
+  );
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q1_pricing_summary_report.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q1_pricing_summary_report.hive
new file mode 100644
index 0000000..a002068
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q1_pricing_summary_report.hive
@@ -0,0 +1,21 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q1_pricing_summary_report;
+
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the target table
+CREATE TABLE q1_pricing_summary_report ( L_RETURNFLAG STRING, L_LINESTATUS STRING, SUM_QTY DOUBLE, SUM_BASE_PRICE DOUBLE, SUM_DISC_PRICE DOUBLE, SUM_CHARGE DOUBLE, AVE_QTY DOUBLE, AVE_PRICE DOUBLE, AVE_DISC DOUBLE, COUNT_ORDER INT);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+INSERT OVERWRITE TABLE q1_pricing_summary_report 
+SELECT 
+  L_RETURNFLAG, L_LINESTATUS, SUM(L_QUANTITY), SUM(L_EXTENDEDPRICE), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)), SUM(L_EXTENDEDPRICE*(1-L_DISCOUNT)*(1+L_TAX)), AVG(L_QUANTITY), AVG(L_EXTENDEDPRICE), AVG(L_DISCOUNT), COUNT(1) 
+FROM 
+  lineitem 
+WHERE 
+  L_SHIPDATE<='1998-09-02' 
+GROUP BY L_RETURNFLAG, L_LINESTATUS 
+ORDER BY L_RETURNFLAG, L_LINESTATUS;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q20_potential_part_promotion.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q20_potential_part_promotion.hive
new file mode 100644
index 0000000..2bb90ea
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q20_potential_part_promotion.hive
@@ -0,0 +1,73 @@
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q20_tmp1;
+DROP TABLE IF EXISTS q20_tmp2;
+DROP TABLE IF EXISTS q20_tmp3;
+DROP TABLE IF EXISTS q20_tmp4;
+DROP TABLE IF EXISTS q20_potential_part_promotion;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/partsupp';
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+
+-- create the target tables
+create table q20_tmp1(p_partkey int);
+create table q20_tmp2(l_partkey int, l_suppkey int, sum_quantity double);
+create table q20_tmp3(ps_suppkey int, ps_availqty int, sum_quantity double);
+create table q20_tmp4(ps_suppkey int);
+create table q20_potential_part_promotion(s_name string, s_address string);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table q20_tmp1
+select distinct p_partkey
+from
+  part;
+
+insert overwrite table q20_tmp2
+select 
+  l_partkey, l_suppkey, 0.5 * sum(l_quantity)
+from
+  lineitem
+group by l_partkey, l_suppkey;
+
+insert overwrite table q20_tmp3
+select 
+  ps_suppkey, ps_availqty, sum_quantity
+from  
+  partsupp ps join q20_tmp1 t1 
+  on 
+    ps.ps_partkey = t1.p_partkey
+  join q20_tmp2 t2 
+  on 
+    ps.ps_partkey = t2.l_partkey and ps.ps_suppkey = t2.l_suppkey;
+
+insert overwrite table q20_tmp4
+select 
+  ps_suppkey
+from 
+  q20_tmp3
+where 
+  ps_availqty > sum_quantity
+group by ps_suppkey;
+
+insert overwrite table q20_potential_part_promotion
+select 
+  s_name, s_address
+from 
+   nation n join supplier s
+  on
+    s.s_nationkey = n.n_nationkey
+  join q20_tmp4 t4
+  on 
+    s.s_suppkey = t4.ps_suppkey
+order by s_name;
+
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q21_suppliers_who_kept_orders_waiting.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q21_suppliers_who_kept_orders_waiting.hive
new file mode 100644
index 0000000..9d01741
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q21_suppliers_who_kept_orders_waiting.hive
@@ -0,0 +1,71 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q21_tmp1;
+DROP TABLE IF EXISTS q21_tmp2;
+DROP TABLE IF EXISTS q21_suppliers_who_kept_orders_waiting;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create target tables
+create table q21_tmp1(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_tmp2(l_orderkey int, count_suppkey int, max_suppkey int);
+create table q21_suppliers_who_kept_orders_waiting(s_name string, numwait int);
+
+-- the query
+insert overwrite table q21_tmp1
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+group by l_orderkey;
+
+insert overwrite table q21_tmp2
+select
+  l_orderkey, count(distinct l_suppkey), max(l_suppkey) as max_suppkey
+from
+  lineitem
+where
+  l_receiptdate > l_commitdate
+group by l_orderkey;
+
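+-- q21_tmp1 counts suppliers per order over all line items, while q21_tmp2
+-- counts only suppliers that delivered late (l_receiptdate > l_commitdate);
+-- the nested joins below relate the two per order before tallying waits
+-- per supplier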
+insert overwrite table q21_suppliers_who_kept_orders_waiting
+select
+  s_name, count(1) as numwait
+from
+  (select s_name from
+    (select s_name, t2.l_orderkey, l_suppkey, count_suppkey, max_suppkey 
+     from q21_tmp2 t2 right outer join
+      (select s_name, l_orderkey, l_suppkey from
+         (select s_name, t1.l_orderkey, l_suppkey, count_suppkey, max_suppkey
+          from
+            q21_tmp1 t1 join
+            (select s_name, l_orderkey, l_suppkey
+             from 
+               orders o join
+               (select s_name, l_orderkey, l_suppkey
+                from
+                  nation n join supplier s
+                  on
+                    s.s_nationkey = n.n_nationkey
+                  join lineitem l
+                  on
+                    s.s_suppkey = l.l_suppkey
+                where
+                  l.l_receiptdate > l.l_commitdate
+                ) l1 on o.o_orderkey = l1.l_orderkey
+             ) l2 on l2.l_orderkey = t1.l_orderkey
+          ) a
+          where
+           (count_suppkey >= 0)
+       ) l3 on l3.l_orderkey = t2.l_orderkey
+    ) b
+  ) c
+group by s_name
+order by numwait desc, s_name
+limit 100;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q22_global_sales_opportunity.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q22_global_sales_opportunity.hive
new file mode 100644
index 0000000..851a8b4
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q22_global_sales_opportunity.hive
@@ -0,0 +1,60 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS q22_customer_tmp;
+DROP TABLE IF EXISTS q22_customer_tmp1;
+DROP TABLE IF EXISTS q22_orders_tmp;
+DROP TABLE IF EXISTS q22_global_sales_opportunity;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+
+-- create target tables
+create table q22_customer_tmp(c_acctbal double, c_custkey int, cntrycode string);
+create table q22_customer_tmp1(avg_acctbal double);
+create table q22_orders_tmp(o_custkey int);
+create table q22_global_sales_opportunity(cntrycode string, numcust int, totacctbal double);
+
+-- the query
+insert overwrite table q22_customer_tmp
+select 
+  c_acctbal, c_custkey, substr(c_phone, 1, 2) as cntrycode
+from 
+  customer;
+ 
+insert overwrite table q22_customer_tmp1
+select
+  avg(c_acctbal)
+from
+  q22_customer_tmp
+where
+  c_acctbal > 0.00;
+
+insert overwrite table q22_orders_tmp
+select 
+  o_custkey 
+from 
+  orders
+group by 
+  o_custkey;
+
+insert overwrite table q22_global_sales_opportunity
+select
+  cntrycode, count(1) as numcust, sum(c_acctbal) as totacctbal
+from
+(
+  select cntrycode, c_acctbal, avg_acctbal from
+  q22_customer_tmp1 ct1 join
+  (
+    select cntrycode, c_acctbal from
+      q22_orders_tmp ot 
+      right outer join q22_customer_tmp ct 
+      on
+        ct.c_custkey = ot.o_custkey
+  ) ct2
+) a
+where
+  c_acctbal > avg_acctbal
+group by cntrycode
+order by cntrycode;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q2_minimum_cost_supplier.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q2_minimum_cost_supplier.hive
new file mode 100644
index 0000000..200b99f
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q2_minimum_cost_supplier.hive
@@ -0,0 +1,56 @@
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier_tmp1;
+DROP TABLE IF EXISTS q2_minimum_cost_supplier_tmp2;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/region';
+
+-- create result tables
+create table q2_minimum_cost_supplier_tmp1 (s_acctbal double, s_name string, n_name string, p_partkey int, ps_supplycost double, p_mfgr string, s_address string, s_phone string, s_comment string);
+create table q2_minimum_cost_supplier_tmp2 (p_partkey int, ps_min_supplycost double);
+create table q2_minimum_cost_supplier (s_acctbal double, s_name string, n_name string, p_partkey int, p_mfgr string, s_address string, s_phone string, s_comment string);
+
+-- the query
+insert overwrite table q2_minimum_cost_supplier_tmp1 
+select 
+  s.s_acctbal, s.s_name, n.n_name, p.p_partkey, ps.ps_supplycost, p.p_mfgr, s.s_address, s.s_phone, s.s_comment 
+from 
+  nation n join region r 
+  on 
+    n.n_regionkey = r.r_regionkey and r.r_name = 'EUROPE' 
+  join supplier s 
+  on 
+    s.s_nationkey = n.n_nationkey 
+  join partsupp ps 
+  on  
+    s.s_suppkey = ps.ps_suppkey 
+  join part p 
+  on 
+    p.p_partkey = ps.ps_partkey and p.p_type like '%BRASS' ;
+
+insert overwrite table q2_minimum_cost_supplier_tmp2 
+select 
+  p_partkey, min(ps_supplycost) 
+from  
+  q2_minimum_cost_supplier_tmp1 
+group by p_partkey;
+
+insert overwrite table q2_minimum_cost_supplier 
+select 
+  t1.s_acctbal, t1.s_name, t1.n_name, t1.p_partkey, t1.p_mfgr, t1.s_address, t1.s_phone, t1.s_comment 
+from 
+  q2_minimum_cost_supplier_tmp1 t1 join q2_minimum_cost_supplier_tmp2 t2 
+on 
+  t1.p_partkey = t2.p_partkey and t1.ps_supplycost=t2.ps_min_supplycost 
+order by s_acctbal desc, n_name, s_name, p_partkey 
+limit 100;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q3_shipping_priority.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q3_shipping_priority.hive
new file mode 100644
index 0000000..0049eb3
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q3_shipping_priority.hive
@@ -0,0 +1,29 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS q3_shipping_priority;
+
+-- create tables and load data
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+
+-- create the target table
+create table q3_shipping_priority (l_orderkey int, revenue double, o_orderdate string, o_shippriority int);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
+Insert overwrite table q3_shipping_priority 
+select 
+  l_orderkey, (l_extendedprice*(1-l_discount)) as revenue, o_orderdate, o_shippriority 
+from 
+  customer c join orders o 
+    on c.c_mktsegment = 'BUILDING' and c.c_custkey = o.o_custkey 
+  join lineitem l 
+    on l.l_orderkey = o.o_orderkey and l.l_linenumber<3
+-- group by l_orderkey, o_orderdate, o_shippriority 
+order by revenue desc
+limit 10;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q4_order_priority.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q4_order_priority.hive
new file mode 100644
index 0000000..aa828e9
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q4_order_priority.hive
@@ -0,0 +1,30 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q4_order_priority_tmp;
+DROP TABLE IF EXISTS q4_order_priority;
+
+-- create tables and load data
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the target table
+CREATE TABLE q4_order_priority_tmp (O_ORDERKEY INT);
+CREATE TABLE q4_order_priority (O_ORDERPRIORITY STRING, ORDER_COUNT INT);
+
+set mapred.min.split.size=536870912;
+-- the query
+INSERT OVERWRITE TABLE q4_order_priority_tmp 
+select 
+  DISTINCT l_orderkey 
+from 
+  lineitem 
+where 
+  l_commitdate < l_receiptdate;
+INSERT OVERWRITE TABLE q4_order_priority 
+select o_orderpriority, count(1) as order_count 
+from 
+  orders o join q4_order_priority_tmp t 
+  on 
+    o.o_orderkey = t.o_orderkey and o.o_orderdate >= '1993-07-01' and o.o_orderdate < '1993-10-01' 
+group by o_orderpriority 
+order by o_orderpriority;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q5_local_supplier_volume.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q5_local_supplier_volume.hive
new file mode 100644
index 0000000..9af2dd2
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q5_local_supplier_volume.hive
@@ -0,0 +1,42 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS q5_local_supplier_volume;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+Create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/region';
+
+-- create the target table
+create table q5_local_supplier_volume (N_NAME STRING, REVENUE DOUBLE);
+
+set mapred.min.split.size=536870912;
+
+-- the query
+insert overwrite table q5_local_supplier_volume 
+select 
+  n_name, sum(l_extendedprice * (1 - l_discount)) as revenue 
+from
+  customer c join
+    ( select n_name, l_extendedprice, l_discount, s_nationkey, o_custkey from 
+      ( select n_name, l_extendedprice, l_discount, l_orderkey, s_nationkey from
+        ( select n_name, s_suppkey, s_nationkey from 
+          ( select n_name, n_nationkey 
+            from nation n join region r 
+            on n.n_regionkey = r.r_regionkey
+          ) n1 join supplier s on s.s_nationkey = n1.n_nationkey
+        ) s1 join lineitem l on l.l_suppkey = s1.s_suppkey
+      ) l1 join orders o on l1.l_orderkey = o.o_orderkey and o.o_orderdate >= '1990-01-01' 
+              and o.o_orderdate < '1995-01-01'
+    ) o1 
+    on c.c_nationkey = o1.s_nationkey and c.c_custkey = o1.o_custkey
+group by n_name 
+order by revenue desc;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q6_forecast_revenue_change.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q6_forecast_revenue_change.hive
new file mode 100644
index 0000000..d8cb9b9
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q6_forecast_revenue_change.hive
@@ -0,0 +1,21 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS q6_forecast_revenue_change;
+
+-- create tables and load data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the target table
+create table q6_forecast_revenue_change (revenue double);
+
+-- the query
+insert overwrite table q6_forecast_revenue_change 
+select 
+  sum(l_extendedprice*l_discount) as revenue
+from 
+  lineitem
+where 
+  l_shipdate >= '1994-01-01'
+  and l_shipdate < '1995-01-01'
+  and l_discount >= 0.05 and l_discount <= 0.07
+  and l_quantity < 24;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q7_volume_shipping.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q7_volume_shipping.hive
new file mode 100644
index 0000000..2678f80
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q7_volume_shipping.hive
@@ -0,0 +1,71 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q7_volume_shipping;
+DROP TABLE IF EXISTS q7_volume_shipping_tmp;
+
+-- create tables and load data
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the target table
+create table q7_volume_shipping (supp_nation string, cust_nation string, l_year int, revenue double);
+create table q7_volume_shipping_tmp(supp_nation string, cust_nation string, s_nationkey int, c_nationkey int);
+
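+-- tuning: larger splits and more bytes per reducer keep the task counts small for this two-stage plan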
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1225000000;
+
+-- the query
+insert overwrite table q7_volume_shipping_tmp
+select 
+  *
+from
+  (
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey,
+      n2.n_nationkey as c_nationkey
+    from 
+      nation n1 join nation n2 
+      on 
+        n2.n_name = 'GERMANY'
+    union all
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey, 
+      n2.n_nationkey as c_nationkey
+    from 
+      nation n1 join nation n2 
+      on 
+        n1.n_name = 'GERMANY'
+  ) a;
+
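+-- join lineitem, orders, customer, and supplier, keep rows that match a precomputed nation pair, and total the volume per ship year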
+insert overwrite table q7_volume_shipping 
+select 
+  supp_nation, cust_nation, l_year, sum(volume) as revenue
+from 
+  (
+    select
+      supp_nation, cust_nation, year(l_shipdate) as l_year, 
+      l_extendedprice * (1 - l_discount) as volume
+    from
+      q7_volume_shipping_tmp t join
+        (select l_shipdate, l_extendedprice, l_discount, c_nationkey, s_nationkey 
+         from supplier s join
+           (select l_shipdate, l_extendedprice, l_discount, l_suppkey, c_nationkey 
+            from customer c join
+              (select l_shipdate, l_extendedprice, l_discount, l_suppkey, o_custkey 
+               from orders o join lineitem l 
+               on 
+                 o.o_orderkey = l.l_orderkey and l.l_shipdate >= '1992-01-01' 
+                 and l.l_shipdate <= '1996-12-31'
+               ) l1 on c.c_custkey = l1.o_custkey
+            ) l2 on s.s_suppkey = l2.l_suppkey
+         ) l3 on l3.c_nationkey = t.c_nationkey and l3.s_nationkey = t.s_nationkey
+   ) shipping
+group by supp_nation, cust_nation, l_year
+order by supp_nation, cust_nation, l_year;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q8_national_market_share.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q8_national_market_share.hive
new file mode 100644
index 0000000..4d9d36f
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q8_national_market_share.hive
@@ -0,0 +1,57 @@
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS region;
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS q8_national_market_share;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create external table region (R_REGIONKEY INT, R_NAME STRING, R_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/region';
+
+-- create the result table
+create table q8_national_market_share(o_year string, mkt_share double);
+
+-- the query
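+-- per order year, the share of 'ECONOMY ANODIZED STEEL' volume sold into the AMERICA region that comes from BRAZIL suppliers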
+insert overwrite table q8_national_market_share 
+select 
+  o_year, sum(case when nation = 'BRAZIL' then volume else 0.0 end) / sum(volume) as mkt_share
+from 
+  (
+    select 
+      year(o_orderdate) as o_year, l_extendedprice * (1-l_discount) as volume, 
+      n2.n_name as nation
+    from
+      nation n2 join
+        (select o_orderdate, l_discount, l_extendedprice, s_nationkey 
+         from 
+          (select o_orderdate, l_discount, l_extendedprice, l_suppkey 
+           from part p join
+             (select o_orderdate, l_partkey, l_discount, l_extendedprice, l_suppkey 
+              from 
+                (select o_orderdate, o_orderkey 
+                 from 
+                   (select c.c_custkey 
+                    from 
+                      (select n1.n_nationkey 
+                       from nation n1 join region r
+                       on n1.n_regionkey = r.r_regionkey and r.r_name = 'AMERICA'
+                       ) n11 join customer c on c.c_nationkey = n11.n_nationkey
+                    ) c1 join orders o on c1.c_custkey = o.o_custkey
+                 ) o1 join lineitem l on l.l_orderkey = o1.o_orderkey and o1.o_orderdate >= '1995-01-01' 
+                         and o1.o_orderdate < '1996-12-31'
+              ) l1 on p.p_partkey = l1.l_partkey and p.p_type = 'ECONOMY ANODIZED STEEL'
+           ) p1 join supplier s on s.s_suppkey = p1.l_suppkey
+        ) s1 on s1.s_nationkey = n2.n_nationkey
+  ) all_nation
+group by o_year
+order by o_year;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q9_product_type_profit.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q9_product_type_profit.hive
new file mode 100644
index 0000000..2e5b4a1
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/q9_product_type_profit.hive
@@ -0,0 +1,51 @@
+DROP TABLE IF EXISTS part;
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS partsupp;
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS q9_product_type_profit;
+
+-- create the tables and load the data
+create external table part (P_PARTKEY INT, P_NAME STRING, P_MFGR STRING, P_BRAND STRING, P_TYPE STRING, P_SIZE INT, P_CONTAINER STRING, P_RETAILPRICE DOUBLE, P_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/part';
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create external table partsupp (PS_PARTKEY INT, PS_SUPPKEY INT, PS_AVAILQTY INT, PS_SUPPLYCOST DOUBLE, PS_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/partsupp';
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the result table
+create table q9_product_type_profit (nation string, o_year string, sum_profit double);
+
+set mapred.min.split.size=536870912;
+set hive.exec.reducers.bytes.per.reducer=1024000000;
+
+-- the query
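+-- per supplier nation and order year, the profit (revenue minus supply cost) on parts whose name contains 'green'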
+insert overwrite table q9_product_type_profit
+select 
+  nation, o_year, sum(amount) as sum_profit
+from 
+  (
+    select 
+      n_name as nation, year(o_orderdate) as o_year, 
+      l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount
+    from
+      (select l_extendedprice, l_discount, l_quantity, l_orderkey, n_name, ps_supplycost 
+       from part p join
+         (select l_extendedprice, l_discount, l_quantity, l_partkey, l_orderkey, 
+                 n_name, ps_supplycost 
+          from partsupp ps join
+            (select l_suppkey, l_extendedprice, l_discount, l_quantity, l_partkey, 
+                    l_orderkey, n_name 
+             from
+               (select s_suppkey, n_name 
+                from nation n join supplier s on n.n_nationkey = s.s_nationkey
+               ) s1 join lineitem l on s1.s_suppkey = l.l_suppkey
+            ) l1 on ps.ps_suppkey = l1.l_suppkey and ps.ps_partkey = l1.l_partkey
+         ) l2 on p.p_name like '%green%' and p.p_partkey = l2.l_partkey
+     ) l3 join orders o on o.o_orderkey = l3.l_orderkey
+  ) profit
+group by nation, o_year
+order by nation, o_year desc;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u10_join.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u10_join.hive
new file mode 100644
index 0000000..1d901c2
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u10_join.hive
@@ -0,0 +1,16 @@
+DROP TABLE IF EXISTS orders;
+DROP TABLE IF EXISTS customer;
+DROP TABLE IF EXISTS u10_join;
+
+-- create the tables and load the data
+create external table orders (O_ORDERKEY INT, O_CUSTKEY INT, O_ORDERSTATUS STRING, O_TOTALPRICE DOUBLE, O_ORDERDATE STRING, O_ORDERPRIORITY STRING, O_CLERK STRING, O_SHIPPRIORITY INT, O_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/orders';
+create external table customer (C_CUSTKEY INT, C_NAME STRING, C_ADDRESS STRING, C_NATIONKEY INT, C_PHONE STRING, C_ACCTBAL DOUBLE, C_MKTSEGMENT STRING, C_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/customer';
+
+create table u10_join(O_TOTALPRICE DOUBLE);
+
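+-- the query: equi-join orders to customer on the customer key, keeping only the order totals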
+insert overwrite table u10_join
+select O_TOTALPRICE
+from orders join customer
+on orders.O_CUSTKEY=customer.C_CUSTKEY
+order by O_TOTALPRICE;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u10_nestedloop_join.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u10_nestedloop_join.hive
new file mode 100644
index 0000000..8fc0a7a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u10_nestedloop_join.hive
@@ -0,0 +1,22 @@
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS u10_nestedloop_join;
+
+-- create tables and load data
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+
+-- create the target table
+create table u10_nestedloop_join(supp_nation string, cust_nation string, s_nationkey int, c_nationkey int);
+
+-- the query
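+-- the greater-than predicate cannot serve as an equi-join key, so this exercises the nested-loop join path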
+insert overwrite table u10_nestedloop_join
+select 
+  * 
+from
+  (
+    select 
+      n1.n_name as supp_nation, n2.n_name as cust_nation, n1.n_nationkey as s_nationkey,
+      n2.n_nationkey as c_nationkey
+    from 
+      nation n1 join nation n2 where n1.n_nationkey > n2.n_nationkey
+  ) a order by a.supp_nation, a.cust_nation;
\ No newline at end of file
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u1_gby.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u1_gby.hive
new file mode 100644
index 0000000..6e53d01
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u1_gby.hive
@@ -0,0 +1,12 @@
+DROP TABLE IF EXISTS nation;
+DROP TABLE IF EXISTS u1_gby;
+
+-- create tables and load data
+create external table nation (N_NATIONKEY INT, N_NAME STRING, N_REGIONKEY INT, N_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/nation';
+create table u1_gby(col1 INT, col2 INT);
+
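+-- the query: count of nations per region key, ordered by key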
+insert overwrite table u1_gby select N_REGIONKEY, count(1)
+from nation
+group by N_REGIONKEY
+order by N_REGIONKEY;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u2_gby_external.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u2_gby_external.hive
new file mode 100644
index 0000000..be9de2d
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u2_gby_external.hive
@@ -0,0 +1,20 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS u2_gby_external;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the result tables
+create table u2_gby_external(l_orderkey int, t_avg_quantity double);
+
+-- the query
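+-- average quantity per order, for the 10 largest order keys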
+insert overwrite table u2_gby_external
+select 
+  l_orderkey, avg(L_QUANTITY) as t_avg_quantity
+from 
+  lineitem
+group by l_orderkey
+order by l_orderkey desc
+limit 10;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u3_union.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u3_union.hive
new file mode 100644
index 0000000..99d62df
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u3_union.hive
@@ -0,0 +1,11 @@
+DROP TABLE IF EXISTS supplier;
+DROP TABLE IF EXISTS u3_union;
+
+create external table supplier (S_SUPPKEY INT, S_NAME STRING, S_ADDRESS STRING, S_NATIONKEY INT, S_PHONE STRING, S_ACCTBAL DOUBLE, S_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/supplier';
+create table u3_union (S_SUPPKEY INT, S_ADDRESS STRING, S_NATIONKEY INT, S_NAME STRING);
+
+insert overwrite table u3_union 
+select * from (select (2*s_suppkey), s_address, s_nationkey, s_name from supplier where s_suppkey*2 < 20 
+union all 
+select (2*s_suppkey), s_address, s_nationkey, s_name from supplier where s_suppkey*2 > 50) t
+order by t.s_address;
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u4_gby_distinct.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u4_gby_distinct.hive
new file mode 100644
index 0000000..6cd4a5b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u4_gby_distinct.hive
@@ -0,0 +1,20 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS u4_gby_distinct;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the result tables
+create table u4_gby_distinct(l_orderkey int, t_avg_quantity double);
+
+-- the query
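+-- same as u2_gby_external, but averaging only the distinct quantities per order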
+insert overwrite table u4_gby_distinct
+select 
+  l_orderkey, avg(distinct L_QUANTITY) as t_avg_quantity
+from 
+  lineitem
+group by l_orderkey
+order by l_orderkey desc
+limit 10;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u5_gby_global.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u5_gby_global.hive
new file mode 100644
index 0000000..cef7e2b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u5_gby_global.hive
@@ -0,0 +1,17 @@
+DROP TABLE IF EXISTS lineitem;
+DROP TABLE IF EXISTS u5_gby_global;
+
+-- create the tables and load the data
+create external table lineitem (L_ORDERKEY INT, L_PARTKEY INT, L_SUPPKEY INT, L_LINENUMBER INT, L_QUANTITY DOUBLE, L_EXTENDEDPRICE DOUBLE, L_DISCOUNT DOUBLE, L_TAX DOUBLE, L_RETURNFLAG STRING, L_LINESTATUS STRING, L_SHIPDATE STRING, L_COMMITDATE STRING, L_RECEIPTDATE STRING, L_SHIPINSTRUCT STRING, L_SHIPMODE STRING, L_COMMENT STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' STORED AS TEXTFILE LOCATION '/tpch/lineitem';
+
+-- create the result tables
+create table u5_gby_global(t_sum_quantity double);
+
+-- the query
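+-- a single global aggregate with no group-by: exactly one output row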
+insert overwrite table u5_gby_global
+select 
+  sum(L_QUANTITY) as t_sum_quantity
+from 
+  lineitem;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u6_large_card_join.hive b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u6_large_card_join.hive
new file mode 100644
index 0000000..5bf560b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/queries/u6_large_card_join.hive
@@ -0,0 +1,24 @@
+DROP TABLE IF EXISTS joinsrc1;
+DROP TABLE IF EXISTS joinsrc2;
+DROP TABLE IF EXISTS u6_large_card_join;
+
+-- create the tables and load the data
+create external table joinsrc1 (ID_1 INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' 
+STORED AS TEXTFILE LOCATION '/test/joinsrc1';
+
+create external table joinsrc2 (ID_2 INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' 
+STORED AS TEXTFILE LOCATION '/test/joinsrc2';
+
+-- create the result tables
+create table u6_large_card_join(col1 int, col2 int);
+
+-- the query
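+-- simple equi-join of the two key columns; the inputs are meant to have large key cardinality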
+insert overwrite table u6_large_card_join
+select 
+   ID_1, ID_2
+from 
+  joinsrc1 join joinsrc2
+on
+  joinsrc1.ID_1=joinsrc2.ID_2;
+
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q10_returned_item.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q10_returned_item.result
new file mode 100644
index 0000000..6cc8ef4
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q10_returned_item.result
@@ -0,0 +1,20 @@
+121Customer#000000121282635.171899999966428.32PERUtv nCR2YKupGN73mQudO27-411-990-2959uriously stealthy ideas. carefully final courts use carefully
+124Customer#000000124222182.51881842.49CHINAaTbyVAW5tCd,v09O28-183-750-7809le fluffily even dependencies. quietly s
+106Customer#000000106190241.33343288.42ARGENTINAxGCOEAUjUNG11-751-989-4627lose slyly. ironic accounts along the evenly regular theodolites wake about the special, final gifts. 
+16Customer#000000016161422.046099999984681.03IRANcYiaeMLZSMAOQ2 d0W,20-781-609-3107kly silent courts. thinly regular theodolites sleep fluffily after 
+44Customer#000000044149364.565199999987315.94MOZAMBIQUEOi,dOSPwDu4jo4x,,P85E0dmhZGvNtBwi26-190-260-5375r requests around the unusual, bold a
+71Customer#000000071129481.02450000001-611.19GERMANYTlGalgdXWBmMV,6agLyWYDyIz9MKzcY8gl,w6t1B17-710-812-5403g courts across the regular, final pinto beans are blithely pending ac
+89Customer#000000089121663.12431530.76KENYAdtR, y9JQWUO6FoJExyp8whOU24-394-451-5404counts are slyly beyond the slyly final accounts. quickly final ideas wake. r
+112Customer#000000112111137.714099999982953.35ROMANIARcfgG3bO7QeCnfjqJT129-233-262-8382rmanently unusual multipliers. blithely ruthless deposits are furiously along the
+62Customer#000000062106368.0153595.61GERMANYupJK2Dnw13,17-361-978-7059kly special dolphins. pinto beans are slyly. quickly regular accounts are furiously a
+146Customer#000000146103265.988799999993328.68CANADAGdxkdXG9u7iyI1,,y5tq4ZyrcEy13-835-723-3223ffily regular dinos are slyly unusual requests. slyly specia
+19Customer#00000001999306.012700000028914.71CHINAuc,3bHIx84H,wdrmLOjVsiqXCq2tr28-396-526-5053 nag. furiously careful packages are slyly at the accounts. furiously regular in
+145Customer#00000014599256.90189748.93JORDANkQjHmt2kcec cy3hfMh969u23-562-444-8454ests? express, express instructions use. blithely fina
+103Customer#00000010397311.772400000022757.45INDONESIA8KIsQX4LJ7QMsj6DrtFtXu0nUEdV,8a19-216-107-2107furiously pending notornis boost slyly around the blithely ironic ideas? final, even instructions cajole fl
+136Customer#00000013695855.39799999999-842.39GERMANYQoLsJ0v5C1IQbh,DS117-501-210-4726ackages sleep ironic, final courts. even requests above the blithely bold requests g
+53Customer#00000005392568.91244113.64MOROCCOHnaxHzTfFTZs8MuCpJyTbZ47Cm4wFOOgib25-168-852-5363ar accounts are. even foxes are blithely. fluffily pending deposits boost
+49Customer#00000004990965.72624573.94IRANcNgAeX7Fqrdf7HQN9EwjUa4nxT,68L FKAxzl20-908-631-4424nusual foxes! fluffily pending packages maintain to the regular 
+37Customer#00000003788065.74579999999-917.75INDIA7EV4Pwh,3SboctTWt18-385-235-7162ilent packages are carefully among the deposits. furiousl
+82Customer#00000008286998.96449468.34CHINAzhG3EZbap4c992Gj3bK,3Ne,Xn28-159-442-5305s wake. bravely regular accounts are furiously. regula
+125Customer#00000012584808.068-234.12ROMANIA,wSZXdVR xxIIfm9s8ITyLl3kgjT6UC07GY0Y29-261-996-3120x-ray finally after the packages? regular requests c
+59Customer#00000005984655.57113458.6ARGENTINAzLOCP0wh92OtBihgspOGl411-355-584-3112ously final packages haggle blithely after the express deposits. furiou
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q11_important_stock.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q11_important_stock.result
new file mode 100644
index 0000000..cdc01d2
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q11_important_stock.result
@@ -0,0 +1,200 @@
+252.832302068E7
+1242.59627599E7
+1752.385395363E7
+1972.248551967E7
+1632.099460571E7
+1602.00232846E7
+821.9919213349999998E7
+1691.898734723E7
+291.867279344E7
+261.8612458270000003E7
+731.827170729E7
+1611.798746301E7
+751.7959598009999998E7
+341.7780838360000003E7
+981.7763191509999998E7
+691.728526943E7
+1111.708388262E7
+1711.635442066E7
+1661.635189374E7
+771.598059909E7
+781.58768992E7
+1431.585686159E7
+171.5474261120000001E7
+1091.5054682620000001E7
+1051.5053163809999999E7
+961.495213259E7
+1461.4810759440000001E7
+1361.4654967749999998E7
+1161.443209134E7
+1281.4393555260000002E7
+1421.422039904E7
+1211.420032605E7
+301.4163132409999998E7
+161.413646503E7
+1981.413535335E7
+791.38652287E7
+901.3732797480000002E7
+321.369962979E7
+741.3388711109999998E7
+11.3378707239999998E7
+891.337148041E7
+221.335499174E7
+1861.317604077E7
+1891.305492542E7
+141.299397721E7
+931.299298218E7
+1681.299041501E7
+991.2750046790000001E7
+1671.268255069E7
+21.258471636E7
+1821.256239411E7
+611.253677656E7
+1121.234957975E7
+1781.2260301739999998E7
+1721.219775193E7
+1651.219746506E7
+1841.216784393E7
+1871.214970141E7
+1531.211935422E7
+951.20468895E7
+111.200715156E7
+1251.200347611E7
+1541.1851133850000001E7
+151.179843879E7
+671.178579951E7
+81.170789262E7
+871.168637671E7
+1341.1683586929999998E7
+1301.168246149E7
+431.161150462E7
+1021.151554211E7
+211.141066856E7
+621.138927324E7
+91.126484373E7
+801.118329032E7
+1731.1026774860000001E7
+941.092440116E7
+31.075814545E7
+1031.0691221600000001E7
+1581.067861635E7
+491.06445572E7
+1391.044045371E7
+1921.035745974E7
+241.033911936E7
+391.03210148E7
+1561.014364082E7
+1881.011906085E7
+121.0108587399999999E7
+331.005296264E7
+281.005234286E7
+409927827.77
+1999907803.559999999
+1939869674.77
+1069869361.73
+1089868370.31
+1839855564.82
+709700431.94
+489655921.88
+1189622756.149999999
+139592610.32
+839543465.079999998
+1599519909.44
+1479513932.18
+459423874.47
+1179408426.72
+1359311247.28
+1859305341.780000001
+1319223742.49
+79175528.209999999
+719167712.04
+1009131099.530000001
+769092927.110000001
+538979121.97
+1418686511.12
+648627897.290000001
+1018521762.0
+1768510175.88
+198481679.5
+1948464559.54
+918460636.52
+1328416851.24
+1138405217.959999999
+518247118.499999999
+418187897.16
+558092552.890000001
+728007155.3
+1157954624.0
+1707895241.609999999
+1147832023.279999999
+377809598.66
+547578243.79
+1807531794.4799999995
+607508961.6899999995
+317433034.240000001
+357132671.49
+1407122050.08
+1507106237.92
+1077082828.68
+1237049500.720000001
+1907017966.9
+1206920857.09
+1966905182.43
+1776887257.27
+1266813302.029999999
+1226812763.340000001
+2006780024.53
+1576766365.68
+636724960.14
+386667789.55
+586640619.38
+1456633786.590000001
+1446546945.92
+206533101.39
+1276483139.620000001
+106433776.51
+366410209.249999999
+476407355.369999999
+1916347187.43
+1376180452.850000001
+566145826.6
+1046134341.850000001
+446038126.659999999
+976036047.1899999995
+1815853464.149999999
+1625829410.54
+865746713.88
+525680644.4799999995
+1555552007.57
+925489588.28
+55461046.93
+185456316.21
+1495367514.630000001
+1105261352.11
+45162989.07
+65120392.47
+1485061589.27
+424957032.47
+1194954403.48
+844891082.38
+654834763.09
+664719253.369999999
+1794610607.92
+234531731.12
+684504770.61
+274371849.52
+1293997604.7800000003
+1953817436.31
+593765210.2100000004
+573739347.1199999996
+1383567425.75
+1743484708.31
+1643462215.0
+813421610.4200000004
+463398443.33
+853338711.3899999997
+503145791.97
+883117730.2399999998
+1512727444.22
+1521837809.1700000002
+1331517282.33
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q12_shipping.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q12_shipping.result
new file mode 100644
index 0000000..bb95677
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q12_shipping.result
@@ -0,0 +1,2 @@
+MAIL5.05.0
+SHIP5.010.0
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q13_customer_distribution.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q13_customer_distribution.result
new file mode 100644
index 0000000..beaa047
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q13_customer_distribution.result
@@ -0,0 +1,27 @@
+050
+168
+177
+206
+136
+126
+96
+235
+145
+105
+214
+184
+114
+84
+74
+263
+223
+63
+53
+43
+292
+242
+192
+152
+281
+251
+31
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q14_promotion_effect.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q14_promotion_effect.result
new file mode 100644
index 0000000..3b823e7
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q14_promotion_effect.result
@@ -0,0 +1 @@
+15.230212611597251
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q15_top_supplier.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q15_top_supplier.result
new file mode 100644
index 0000000..d975521
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q15_top_supplier.result
@@ -0,0 +1 @@
+10Supplier#000000010Saygah3gYWMp72i PY34-852-489-8585797313.3838
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q16_parts_supplier_relationship.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q16_parts_supplier_relationship.result
new file mode 100644
index 0000000..393a33a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q16_parts_supplier_relationship.result
@@ -0,0 +1,34 @@
+Brand#11PROMO ANODIZED TIN454
+Brand#11SMALL PLATED COPPER454
+Brand#11STANDARD POLISHED TIN454
+Brand#13MEDIUM ANODIZED STEEL364
+Brand#14SMALL ANODIZED NICKEL454
+Brand#15LARGE ANODIZED BRASS454
+Brand#21LARGE BURNISHED COPPER194
+Brand#23ECONOMY BRUSHED COPPER94
+Brand#25MEDIUM PLATED BRASS454
+Brand#31ECONOMY PLATED STEEL234
+Brand#31PROMO POLISHED TIN234
+Brand#32MEDIUM BURNISHED BRASS494
+Brand#33LARGE BRUSHED TIN364
+Brand#33SMALL BURNISHED NICKEL34
+Brand#34LARGE PLATED BRASS454
+Brand#34MEDIUM BRUSHED COPPER94
+Brand#34SMALL PLATED BRASS144
+Brand#35STANDARD ANODIZED STEEL234
+Brand#43PROMO POLISHED BRASS194
+Brand#43SMALL BRUSHED NICKEL94
+Brand#44SMALL PLATED COPPER194
+Brand#52MEDIUM BURNISHED TIN454
+Brand#52SMALL BURNISHED NICKEL144
+Brand#53MEDIUM BRUSHED COPPER34
+Brand#55STANDARD ANODIZED BRASS364
+Brand#55STANDARD BRUSHED COPPER34
+Brand#13SMALL BRUSHED NICKEL192
+Brand#25SMALL BURNISHED COPPER32
+Brand#43MEDIUM ANODIZED BRASS142
+Brand#53STANDARD PLATED STEEL452
+Brand#24MEDIUM PLATED STEEL191
+Brand#51ECONOMY POLISHED STEEL491
+Brand#53LARGE BURNISHED NICKEL231
+Brand#54ECONOMY ANODIZED BRASS91
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q17_small_quantity_order_revenue.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q17_small_quantity_order_revenue.result
new file mode 100644
index 0000000..ccfa2e3
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q17_small_quantity_order_revenue.result
@@ -0,0 +1 @@
+863.2285714285715
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q18_large_volume_customer.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q18_large_volume_customer.result
new file mode 100644
index 0000000..32d3515
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q18_large_volume_customer.result
@@ -0,0 +1,100 @@
+Customer#0000000707025671998-02-27263411.29266.0
+Customer#0000000101044211997-04-04258779.02255.0
+Customer#0000000525257651994-12-15249900.42247.0
+Customer#0000000828234601995-10-03245976.74254.0
+Customer#0000000686822081995-05-01245388.06256.0
+Customer#0000000282823061995-07-26244704.23235.0
+Customer#00000014614659251995-11-13242588.87242.0
+Customer#0000000292911211997-01-13241837.88242.0
+Customer#0000000676739071992-08-19240457.56239.0
+Customer#0000000767651581997-01-21240284.95248.0
+Customer#00000013113144841996-12-24237947.61243.0
+Customer#0000001151156451994-12-03234763.73245.0
+Customer#0000000494942941992-08-15232194.74225.0
+Customer#0000000767614771997-08-24231831.35236.0
+Customer#0000000444446451994-09-20231012.22248.0
+Customer#0000000898959571993-12-27230949.45242.0
+Customer#000000076763261995-06-04229165.17228.0
+Customer#000000067679281995-03-02228136.49241.0
+Customer#0000000797938081994-04-24228054.01227.0
+Customer#0000000373753171994-09-09228002.51231.0
+Customer#00000000443581993-09-20226806.66223.0
+Customer#00000014214256991992-07-30226314.91240.0
+Customer#00000012112118881993-10-31224724.11225.0
+Customer#0000000949426901996-03-31224674.27219.0
+Customer#0000000949454131997-10-17224382.57212.0
+Customer#0000000323253811993-01-29223995.46228.0
+Customer#0000001451455181998-02-08223537.09214.0
+Customer#0000000292929451996-01-03223507.72231.0
+Customer#000000007736541992-06-03222653.54222.0
+Customer#0000001451458071993-11-24222392.53216.0
+Customer#00000014914936191996-11-20222274.54221.0
+Customer#0000000707054721993-04-11221636.83217.0
+Customer#00000013713749001992-06-30221320.76227.0
+Customer#00000010610637781993-05-26221036.31225.0
+Customer#00000012112111531996-04-18220727.97209.0
+Customer#0000000707040041993-05-07220715.14228.0
+Customer#000000098987681996-08-20220636.82231.0
+Customer#00000014914956061996-11-12219959.08231.0
+Customer#000000055554841997-01-03219920.62224.0
+Customer#00000014014042301992-03-04219709.6217.0
+Customer#00000008282391996-09-20219707.84231.0
+Customer#0000000373727891998-03-14219123.27218.0
+Customer#0000000171732691996-03-01218697.85220.0
+Customer#00000014914935901995-05-13218482.7210.0
+Customer#0000001341346141992-12-01218116.21204.0
+Customer#0000000929241971996-08-13217709.03225.0
+Customer#00000013313311561996-10-19217682.81218.0
+Customer#000000046464531997-05-26216826.73226.0
+Customer#00000012412431091993-07-24216104.85210.0
+Customer#0000000434349941996-06-29216071.76213.0
+Customer#00000014914937131998-05-07215342.63213.0
+Customer#00000002929681998-04-18215135.72213.0
+Customer#0000000131324381993-07-15214494.39210.0
+Customer#00000013313346131998-03-05212339.55214.0
+Customer#00000010610617611993-12-24211925.95218.0
+Customer#0000000494912481992-01-02210713.88207.0
+Customer#000000005558591997-04-23210643.96211.0
+Customer#00000010610618271996-06-22210113.88205.0
+Customer#0000000858551841998-07-20209155.48213.0
+Customer#0000001331337101993-01-02208974.42196.0
+Customer#0000000525251861996-08-03208892.63210.0
+Customer#0000000282820501994-06-02208517.98217.0
+Customer#0000000767621801996-09-14208481.57212.0
+Customer#00000011911935881995-03-19207925.83212.0
+Customer#00000013413414441994-12-06207907.6205.0
+Customer#0000001031037421994-12-23207632.55198.0
+Customer#0000000171740991992-08-21207364.8208.0
+Customer#00000010910912861993-05-14207291.83200.0
+Customer#0000000797956331998-05-31207119.83203.0
+Customer#0000000626220221992-03-15206742.11209.0
+Customer#0000000222245831994-09-25206495.43197.0
+Customer#00000014814851851997-07-25206179.68198.0
+Customer#0000000444431751994-07-15205282.63215.0
+Customer#0000000565625651998-02-28204438.57201.0
+Customer#00000014914937471996-08-20204355.65195.0
+Customer#00000010110149641997-07-28204163.1197.0
+Customer#0000000626249921992-05-10203904.8198.0
+Customer#0000000101037511994-04-27202917.72204.0
+Customer#0000000767625341996-07-17202784.54214.0
+Customer#00000000111641992-10-21202660.52213.0
+Customer#00000011811812831996-08-30202623.92200.0
+Customer#0000000101018901996-12-18202364.58207.0
+Customer#0000000777717621994-08-20202227.17216.0
+Customer#00000010610641961998-05-15201455.98198.0
+Customer#0000000646458951997-01-01201419.83200.0
+Customer#00000000886441992-05-01201268.06202.0
+Customer#000000047472611993-06-29201003.12200.0
+Customer#0000000797946721995-11-07199593.71203.0
+Customer#0000001311319301994-12-17199102.23204.0
+Customer#00000011811841611993-08-21198995.21211.0
+Customer#0000000737340691992-05-13198816.13199.0
+Customer#00000014214256961995-05-04198723.3198.0
+Customer#00000013413438721996-09-06198538.68207.0
+Customer#00000012712710591994-02-27198360.22194.0
+Customer#00000010310342931996-08-20198322.91202.0
+Customer#000000080809931995-09-10198238.65194.0
+Customer#000000091914201995-10-31198039.23200.0
+Customer#0000000929233331992-09-16197973.22195.0
+Customer#00000014614641921998-04-19197192.95209.0
+Customer#00000014514515751995-09-13197031.52204.0
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q19_discounted_revenue.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q19_discounted_revenue.result
new file mode 100644
index 0000000..2e44572
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q19_discounted_revenue.result
@@ -0,0 +1 @@
+51515.7344
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q1_pricing_summary_report.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q1_pricing_summary_report.result
new file mode 100644
index 0000000..c1a7b06
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q1_pricing_summary_report.result
@@ -0,0 +1,4 @@
+AF37474.03.756962464000004E73.567619209699997E73.7101416222424E725.35453315290933725419.2318267929880.0508660351826793961478
+NF1041.01041301.07999060.89799999991036450.8022827.39473684210526427402.6597368421030.0428947368421052638
+NO75168.07.538495536999999E77.165316630340007E77.4498798133073E725.55865351921115225632.4227711662680.049697381842910722941
+RF36511.03.657084124000003E73.473847287579999E73.616906011219296E725.05902539464653225100.09693891560.050027453671928671457
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q20_potential_part_promotion.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q20_potential_part_promotion.result
new file mode 100644
index 0000000..d808757
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q20_potential_part_promotion.result
@@ -0,0 +1,10 @@
+Supplier#000000001 N kD4on9OM Ipw3,gf0JBoQDd7tgrzrddZ
+Supplier#00000000289eJ5ksX3ImxJQBvxObC,
+Supplier#000000003q1,G3Pj6OjIuUYfUoH18BFTKP5aU9bEV3
+Supplier#000000004Bk7ah4CK8SYQTepEmvMkkgMwg
+Supplier#000000005Gcdm2rJRzl5qlTVzc
+Supplier#000000006tQxuVm7s7CnK
+Supplier#000000007s,4TicNGB4uO6PaSqNBUq
+Supplier#0000000089Sq4bBH2FQEmaFOocY45sRTxo6yuoG
+Supplier#0000000091KhUgZegwM3ua7dsYmekYBsK
+Supplier#000000010Saygah3gYWMp72i PY
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q21_suppliers_who_kept_orders_waiting.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q21_suppliers_who_kept_orders_waiting.result
new file mode 100644
index 0000000..50fa26f
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q21_suppliers_who_kept_orders_waiting.result
@@ -0,0 +1,10 @@
+Supplier#000000007431
+Supplier#000000005417
+Supplier#000000001403
+Supplier#000000009373
+Supplier#000000004367
+Supplier#000000002364
+Supplier#000000010358
+Supplier#000000003349
+Supplier#000000008347
+Supplier#000000006343
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q22_global_sales_opportunity.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q22_global_sales_opportunity.result
new file mode 100644
index 0000000..08bcd0c
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q22_global_sales_opportunity.result
@@ -0,0 +1,23 @@
+10320747.13
+11535208.880000000005
+12213735.27
+13213545.3
+1419963.15
+15214624.84
+16211239.02
+1719127.27
+18322156.91
+19643758.41
+20323085.67
+21319400.52
+22320332.18
+23325483.06
+25319038.36
+26538943.899999999994
+27213248.06
+28542700.5
+29436059.009999999995
+30217528.46
+31323599.11
+32425754.22
+33320359.59
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q2_minimum_cost_supplier.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q2_minimum_cost_supplier.result
new file mode 100644
index 0000000..402ecf3
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q2_minimum_cost_supplier.result
@@ -0,0 +1,11 @@
+6820.35Supplier#000000007UNITED KINGDOM2Manufacturer#1s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM4Manufacturer#3s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM22Manufacturer#4s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM62Manufacturer#3s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM79Manufacturer#4s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM94Manufacturer#3s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM102Manufacturer#3s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM106Manufacturer#3s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM131Manufacturer#5s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM159Manufacturer#4s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
+6820.35Supplier#000000007UNITED KINGDOM193Manufacturer#4s,4TicNGB4uO6PaSqNBUq33-990-965-2201s unwind silently furiously regular courts. final requests are deposits. requests wake quietly blit
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q3_shipping_priority.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q3_shipping_priority.result
new file mode 100644
index 0000000..94b2cb9
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q3_shipping_priority.result
@@ -0,0 +1,10 @@
+157454559.51996-12-120
+301253664.311993-05-050
+115353458.01996-04-180
+128452830.331996-01-070
+317151436.9351993-04-060
+32350547.61994-03-260
+153749641.821992-02-150
+205148951.7775999999941996-03-180
+128648603.9036000000051993-05-140
+61448531.71521992-12-010
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q4_order_priority.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q4_order_priority.result
new file mode 100644
index 0000000..0e757ee
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q4_order_priority.result
@@ -0,0 +1,5 @@
+1-URGENT9
+2-HIGH7
+3-MEDIUM9
+4-NOT SPECIFIED8
+5-LOW12
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q5_local_supplier_volume.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q5_local_supplier_volume.result
new file mode 100644
index 0000000..ecdf467
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q5_local_supplier_volume.result
@@ -0,0 +1,8 @@
+PERU1099912.8209
+MOROCCO520107.17919999996
+IRAN375610.964
+IRAQ364417.398
+ETHIOPIA253825.7622
+ARGENTINA102659.0106
+UNITED KINGDOM61065.8711
+KENYA29679.393200000002
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q6_forecast_revenue_change.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q6_forecast_revenue_change.result
new file mode 100644
index 0000000..45bb483
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q6_forecast_revenue_change.result
@@ -0,0 +1 @@
+77949.91860000002
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q7_volume_shipping.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q7_volume_shipping.result
new file mode 100644
index 0000000..c4cfcee
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q7_volume_shipping.result
@@ -0,0 +1,37 @@
+ARGENTINAGERMANY199263089.1006
+ARGENTINAGERMANY199364024.4532
+ARGENTINAGERMANY199432719.877199999995
+ARGENTINAGERMANY199563729.862400000005
+ARGENTINAGERMANY19961801.8198
+ETHIOPIAGERMANY199274693.317
+ETHIOPIAGERMANY199313733.706600000001
+ETHIOPIAGERMANY199483631.40359999999
+ETHIOPIAGERMANY199569329.67199999999
+ETHIOPIAGERMANY199642017.435999999994
+IRANGERMANY199238014.335399999996
+IRANGERMANY1994252152.5927
+IRANGERMANY19959106.957199999999
+IRAQGERMANY199268040.7747
+IRAQGERMANY19933676.8004
+IRAQGERMANY199485948.85280000001
+IRAQGERMANY199566380.2488
+KENYAGERMANY199277164.5422
+KENYAGERMANY199363792.8736
+KENYAGERMANY199474537.6256
+KENYAGERMANY199537851.309
+KENYAGERMANY199618467.316
+MOROCCOGERMANY199289669.69080000001
+MOROCCOGERMANY1994173726.0087
+MOROCCOGERMANY199537169.8497
+PERUGERMANY1992226624.7652
+PERUGERMANY199358359.3076
+PERUGERMANY1994345376.2983
+PERUGERMANY199552968.9424
+PERUGERMANY19967960.72
+UNITED KINGDOMGERMANY1992100143.32140000002
+UNITED KINGDOMGERMANY199341582.5227
+UNITED KINGDOMGERMANY1994164740.3271
+UNITED KINGDOMGERMANY199650909.551999999996
+UNITED STATESGERMANY199252480.9528
+UNITED STATESGERMANY1994115566.8388
+UNITED STATESGERMANY199580489.69949999999
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q8_national_market_share.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q8_national_market_share.result
new file mode 100644
index 0000000..7d7bd56
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q8_national_market_share.result
@@ -0,0 +1,2 @@
+19950.0
+19960.0
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q9_product_type_profit.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q9_product_type_profit.result
new file mode 100644
index 0000000..f900b06
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/q9_product_type_profit.result
@@ -0,0 +1,60 @@
+ARGENTINA199817779.069700000007
+ARGENTINA199713943.953800000003
+ARGENTINA19967641.422700000003
+ARGENTINA199520892.752500000002
+ARGENTINA199415088.352599999998
+ARGENTINA199317586.344600000004
+ARGENTINA199228732.461499999994
+ETHIOPIA199828217.159999999996
+ETHIOPIA199633970.65
+ETHIOPIA199537720.35
+ETHIOPIA199437251.01
+ETHIOPIA199323782.61
+IRAN199723590.007999999998
+IRAN19967428.232500000005
+IRAN199521000.996499999994
+IRAN199429408.13
+IRAN199349876.41499999999
+IRAN199252064.24
+IRAQ199811619.960399999996
+IRAQ199747910.246
+IRAQ199618459.567499999997
+IRAQ199532782.37010000001
+IRAQ19949041.2317
+IRAQ199330687.2625
+IRAQ199229098.2557
+KENYA199833148.3345
+KENYA199754355.016500000005
+KENYA199653607.4854
+KENYA199585354.87380000002
+KENYA1994102904.2511
+KENYA1993109310.8084
+KENYA1992138534.12099999998
+MOROCCO1998157058.2328
+MOROCCO199788669.96099999998
+MOROCCO1996236833.66719999994
+MOROCCO1995381575.86679999996
+MOROCCO1994243523.4336
+MOROCCO1993232196.78029999993
+MOROCCO1992347434.1452
+PERU1998101109.01959999997
+PERU199758073.086599999995
+PERU199630360.52179999999
+PERU1995138451.77999999997
+PERU199455023.063200000004
+PERU1993110409.0863
+PERU199270946.1916
+UNITED KINGDOM1998139685.04400000002
+UNITED KINGDOM1997183502.04979999995
+UNITED KINGDOM1996374085.28839999996
+UNITED KINGDOM1995548356.7983999999
+UNITED KINGDOM1994266982.7679999999
+UNITED KINGDOM1993717309.464
+UNITED KINGDOM199279540.6016
+UNITED STATES199832847.96
+UNITED STATES199730849.5
+UNITED STATES199656125.46000000001
+UNITED STATES199515961.7977
+UNITED STATES199431671.2
+UNITED STATES199355057.469
+UNITED STATES199251970.23
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u10_join.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u10_join.result
new file mode 100644
index 0000000..2239e1c
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u10_join.result
@@ -0,0 +1,1500 @@
+1051.15
+1084.38
+1147.42
+1816.28
+1861.19
+1984.14
+2007.48
+2158.13
+2638.98
+3089.42
+3223.17
+3726.14
+3808.05
+3892.77
+3942.73
+3967.47
+4104.3
+4225.26
+4766.19
+4819.91
+4820.55
+4913.06
+5184.26
+5472.17
+5978.65
+6088.41
+6402.41
+6406.29
+6793.45
+7014.31
+7102.74
+7108.12
+7211.59
+7231.91
+7471.75
+7859.36
+8225.96
+8413.31
+8709.16
+8720.45
+8945.03
+8958.65
+9006.25
+9103.4
+9495.28
+9669.46
+9679.45
+9741.03
+10163.56
+10451.97
+10500.27
+10508.12
+10645.48
+10677.86
+10934.84
+11405.4
+11474.95
+11493.8
+11575.77
+11850.45
+12137.76
+12291.83
+12896.25
+12907.62
+12918.7
+12984.85
+13197.78
+13282.23
+13491.31
+13603.08
+13679.32
+14275.01
+14790.37
+15082.82
+15313.61
+15417.57
+16003.86
+16030.15
+16346.94
+16689.19
+16763.95
+16922.51
+17031.01
+17172.66
+17213.59
+17231.05
+17603.01
+17986.15
+18247.86
+18307.45
+18566.14
+18594.66
+18653.09
+18795.62
+18885.35
+19056.99
+19405.73
+19612.03
+19811.69
+20065.73
+20099.43
+20182.22
+20214.49
+20530.97
+20752.62
+20791.5
+21088.59
+21119.86
+21137.08
+21207.08
+21267.72
+21640.1
+21760.09
+21815.3
+21964.66
+22072.16
+22294.51
+22767.49
+22840.21
+22994.51
+23020.62
+23039.46
+23067.48
+23198.24
+23280.61
+23327.88
+23476.12
+23614.89
+23973.6
+23984.88
+24265.24
+24347.36
+24362.39
+24468.16
+24637.96
+24654.79
+24660.06
+24844.39
+25007.95
+25170.88
+25661.87
+25767.07
+25861.74
+25985.52
+26011.2
+26128.99
+26714.67
+26798.65
+26839.16
+26868.85
+26906.38
+26981.31
+26999.83
+27016.74
+27049.22
+27148.63
+27204.6
+27461.48
+27561.82
+27598.17
+27629.66
+27663.16
+27727.52
+28007.73
+28070.86
+28155.92
+28223.57
+28330.42
+28571.39
+28623.04
+28658.26
+28930.68
+29029.84
+29305.47
+29673.73
+29827.44
+29920.8
+30045.95
+30059.47
+30137.17
+30457.91
+30494.62
+30495.65
+30550.9
+30722.49
+30755.69
+30778.78
+30783.05
+31043.39
+31075.51
+31084.79
+31103.83
+31471.04
+31538.94
+31689.46
+31693.88
+31795.52
+32796.35
+32890.89
+32929.3
+33085.68
+33123.28
+33124.96
+33248.04
+33396.35
+33401.77
+33470.4
+33755.47
+33998.9
+34004.48
+34035.17
+34269.96
+34363.63
+34632.57
+34768.68
+34797.72
+34936.31
+35019.95
+35131.8
+35390.15
+35514.45
+35589.57
+35795.22
+35949.14
+36024.96
+36333.34
+36389.43
+36464.76
+36468.55
+36551.43
+36592.48
+36671.88
+36889.65
+37248.78
+37301.25
+37348.62
+37398.9
+37526.68
+37696.7
+37776.79
+37804.43
+37881.31
+38038.84
+38057.81
+38065.28
+38164.23
+38330.42
+38446.39
+38545.97
+38596.81
+38974.67
+38988.98
+39048.94
+39103.37
+39190.62
+39263.28
+39338.44
+39358.51
+39382.74
+39397.6
+39470.39
+39612.63
+39700.29
+39793.05
+39805.04
+39828.51
+39835.54
+39906.87
+40142.15
+40183.29
+40234.5
+40492.37
+40548.99
+40572.64
+40975.96
+40982.08
+41032.81
+41162.24
+41375.69
+41392.31
+41433.48
+41450.19
+41492.25
+41552.78
+41573.42
+41605.63
+41655.51
+41670.02
+41686.1
+41723.86
+41758.44
+41760.0
+41811.12
+42225.53
+42410.57
+42579.4
+42867.92
+42927.07
+42945.82
+43092.76
+43255.19
+43315.15
+43360.95
+43789.14
+43809.37
+43889.17
+44002.53
+44387.23
+44429.81
+44672.03
+44777.63
+44781.32
+45311.07
+45514.27
+45536.27
+45695.84
+45704.96
+45767.69
+45860.94
+45889.09
+46076.46
+46107.7
+46298.53
+46310.83
+46355.83
+46366.56
+46376.09
+46380.69
+46393.97
+46418.85
+46459.92
+46598.65
+46753.63
+46815.93
+46918.22
+47010.15
+47033.21
+47099.71
+47120.41
+47232.79
+47272.67
+47286.32
+47384.71
+47440.91
+47447.63
+47614.08
+47623.94
+47627.89
+47753.0
+47823.04
+47852.06
+47925.47
+47940.51
+47985.98
+48024.99
+48053.18
+48206.14
+48284.06
+48419.58
+48478.54
+48497.09
+48502.79
+48781.39
+49033.69
+49305.98
+49357.72
+49625.21
+49841.12
+49903.57
+50201.16
+50287.06
+50328.84
+50601.01
+50724.06
+51004.44
+51494.47
+51697.18
+51775.54
+51839.94
+52114.01
+52190.52
+52359.51
+52414.19
+52433.54
+52562.16
+52982.23
+53212.95
+53287.25
+53581.41
+53649.35
+53827.34
+54121.92
+54175.35
+54356.1
+54478.95
+54655.07
+55090.67
+55211.04
+55553.68
+55554.97
+55582.94
+55619.01
+55892.35
+55950.21
+56207.66
+56210.26
+56227.04
+56449.23
+56779.06
+56936.1
+56938.16
+56998.36
+57092.26
+57127.71
+57213.18
+57584.12
+57697.44
+57740.74
+57823.37
+58032.77
+58094.75
+58111.0
+58168.07
+58212.22
+58218.35
+58273.89
+58546.02
+58666.79
+58853.11
+58932.19
+59180.25
+59186.02
+59291.75
+59404.77
+59417.76
+59439.44
+59455.61
+59651.38
+59931.42
+59982.31
+59989.66
+60314.97
+60568.34
+60867.14
+60868.39
+60887.9
+60918.41
+60933.29
+61052.1
+61297.42
+61811.33
+62014.51
+62108.45
+62172.34
+62251.15
+62258.18
+62277.18
+62316.61
+62430.67
+62453.97
+62497.51
+62518.31
+62541.27
+62567.99
+62661.93
+62716.67
+62807.13
+62814.89
+62972.29
+63041.33
+63103.32
+63195.54
+63278.0
+63470.78
+63535.56
+63537.13
+63590.17
+63703.92
+63873.14
+64000.93
+64102.93
+64271.75
+64344.86
+64838.66
+64892.73
+65189.17
+65218.47
+65269.38
+65331.05
+65385.42
+65601.08
+65678.21
+65702.39
+65883.92
+66158.13
+66268.86
+66408.29
+66455.34
+66697.95
+66817.05
+66927.16
+67018.3
+67045.94
+67049.37
+67167.19
+67173.82
+67226.28
+67525.43
+67572.73
+67789.42
+67941.54
+67944.38
+67979.49
+68052.7
+68056.57
+68255.82
+68309.28
+68494.08
+68519.84
+68619.29
+68817.08
+68885.66
+68908.31
+69412.71
+69447.25
+69668.22
+70182.63
+70183.29
+70232.26
+70377.31
+70392.02
+70403.62
+70430.54
+70462.84
+70502.52
+70529.27
+70553.45
+70557.05
+70857.51
+71017.99
+71241.63
+71349.3
+71362.5
+71381.21
+71453.85
+71460.49
+71543.41
+71683.84
+71781.23
+71822.86
+71852.67
+71968.1
+72055.87
+72150.68
+72359.55
+72440.52
+72533.07
+72835.95
+72843.48
+73517.91
+73739.06
+73882.37
+73907.63
+73924.21
+73962.95
+73990.08
+74483.95
+74710.74
+74882.22
+74892.08
+74940.13
+75026.51
+75030.81
+75074.07
+75144.68
+75145.87
+75661.7
+75733.58
+76067.1
+76119.72
+76164.41
+76518.11
+76799.25
+76848.96
+77247.05
+77482.87
+77487.09
+77705.4
+77754.62
+78221.69
+78567.55
+78676.54
+78711.4
+79189.58
+79197.77
+79230.47
+79248.35
+79258.24
+79270.23
+79380.51
+79594.68
+79646.89
+79683.42
+79782.56
+79785.52
+79863.84
+79901.18
+80018.54
+80084.61
+80274.22
+80437.72
+80438.38
+80487.97
+80592.44
+80624.38
+81089.61
+81138.17
+81351.53
+81663.65
+81826.12
+82026.18
+82034.03
+82151.12
+82190.77
+82197.79
+82467.29
+82493.07
+82504.56
+82563.1
+82598.87
+82746.74
+82824.14
+82918.36
+82928.12
+83413.3
+83490.99
+83665.2
+83773.49
+83804.38
+84053.93
+84314.51
+84405.78
+84493.55
+84627.76
+84651.8
+84800.44
+84871.5
+84954.79
+84983.9
+85122.24
+85255.56
+85381.0
+85394.06
+85397.04
+85477.89
+85552.21
+85755.84
+85822.67
+85861.93
+85901.7
+85927.85
+85948.02
+86076.86
+86534.05
+86615.25
+86918.57
+86958.28
+87073.89
+87248.17
+87475.82
+87689.88
+87803.55
+87892.38
+87988.34
+88047.04
+88080.33
+88216.32
+88219.12
+88448.24
+88704.26
+88966.68
+89143.36
+89224.24
+89345.99
+89359.11
+89399.4
+89503.11
+89509.91
+89592.11
+89684.31
+89731.1
+89792.48
+89877.09
+89992.48
+89999.72
+90042.41
+90380.4
+90707.58
+90755.31
+90981.28
+91017.61
+91438.59
+91513.79
+91541.48
+91664.85
+91678.66
+91795.13
+91929.93
+91982.29
+92069.62
+92123.32
+92187.8
+92261.08
+92326.79
+92340.77
+92484.7
+92716.17
+92730.74
+92798.66
+92851.8
+92856.91
+93206.35
+93259.93
+93335.6
+93403.05
+93769.28
+93828.15
+94030.43
+94135.77
+94231.71
+94400.43
+94446.69
+94527.23
+94534.07
+94649.25
+94866.39
+94969.41
+95063.41
+95126.32
+95291.79
+95312.81
+95453.8
+95563.95
+95591.4
+95731.5
+95761.93
+95929.46
+96015.13
+96057.42
+96166.92
+96359.65
+96431.77
+96458.03
+96596.81
+96855.29
+97502.23
+97733.87
+97758.28
+97981.06
+98140.86
+98258.73
+98275.37
+98335.61
+98422.83
+98485.21
+98541.95
+98643.17
+98723.11
+98753.57
+98956.82
+98987.51
+99050.81
+99088.75
+99177.69
+99290.01
+99377.51
+99494.67
+99577.55
+99798.76
+99834.47
+99851.38
+99960.46
+100035.03
+100106.96
+100290.07
+100445.59
+100671.06
+100714.13
+100749.6
+100750.67
+100758.71
+100954.64
+101020.75
+101202.18
+101240.96
+101339.68
+101429.61
+101616.44
+101709.52
+101878.46
+101899.93
+102207.2
+102226.59
+102534.63
+102665.03
+102693.61
+102793.59
+102807.59
+103085.13
+103192.74
+103320.91
+103641.15
+103656.44
+103814.27
+104038.78
+104166.56
+104259.88
+104391.11
+104523.03
+104585.77
+104664.4
+104695.09
+104759.25
+104927.66
+104966.33
+105094.09
+105145.4
+105302.05
+105421.09
+105492.37
+105561.21
+105770.53
+105789.01
+106036.84
+106045.89
+106122.38
+106150.05
+106315.25
+106446.02
+106612.48
+106635.21
+106823.97
+106935.19
+107139.29
+107140.22
+107231.6
+107406.26
+107732.23
+107824.4
+107919.86
+107958.62
+108107.42
+108171.38
+108196.56
+108239.46
+108317.51
+108334.3
+108353.08
+108361.46
+108412.57
+108424.94
+108443.84
+109077.69
+109202.9
+109246.54
+109247.0
+109301.02
+109351.87
+109469.9
+109536.55
+109979.71
+110194.31
+110432.76
+110626.82
+110826.83
+110958.36
+111020.79
+111207.93
+111403.66
+111547.31
+111597.96
+111924.56
+112444.42
+112603.34
+112770.89
+112843.52
+112845.04
+112912.0
+113156.3
+113191.45
+113417.03
+113505.19
+113701.89
+113954.89
+114097.63
+114145.18
+114681.55
+114879.19
+114978.03
+114990.63
+115161.29
+115219.88
+115411.37
+115688.85
+115717.37
+115759.13
+115877.4
+115929.14
+115959.96
+116003.11
+116069.66
+116093.49
+116127.69
+116193.97
+116227.05
+116258.53
+116740.67
+116789.98
+116792.13
+116923.0
+117132.72
+117397.16
+117537.87
+117728.37
+117817.52
+117827.18
+117909.23
+118036.54
+118201.53
+118464.65
+118495.12
+118570.79
+118802.62
+118896.95
+119164.96
+119201.64
+119605.91
+119820.38
+119838.14
+119887.47
+119910.04
+119917.28
+120053.52
+120073.51
+120086.84
+120324.82
+120516.93
+120533.46
+120626.49
+120828.12
+121220.59
+121360.83
+121663.68
+121704.45
+121935.23
+121994.04
+122157.14
+122490.66
+122611.05
+122621.31
+122785.82
+122823.78
+122964.66
+122969.79
+123014.83
+123120.06
+123477.05
+123586.03
+123956.25
+124380.73
+124402.59
+124470.32
+124539.0
+124608.69
+124637.19
+124661.48
+124675.27
+124719.97
+124950.79
+125011.92
+125030.37
+125125.57
+125170.86
+125188.72
+125191.12
+125396.8
+125509.17
+125562.09
+125792.83
+126066.0
+126113.32
+126205.42
+126235.35
+126597.21
+126804.9
+126902.81
+126948.81
+127068.89
+127132.51
+127134.05
+127191.47
+127345.45
+127527.05
+127532.2
+127717.72
+127817.38
+127934.71
+128014.15
+128024.71
+128234.96
+128367.97
+128624.99
+128776.9
+128786.57
+129004.81
+129012.84
+129033.13
+129062.13
+129086.93
+129096.8
+129546.56
+129636.99
+129657.08
+129803.03
+129821.09
+130125.64
+130204.17
+130345.9
+130515.61
+130647.18
+130687.64
+130702.19
+131079.52
+131092.67
+131103.31
+131122.82
+131146.47
+131251.81
+131432.42
+131447.03
+131604.34
+131752.07
+131891.05
+132494.97
+132838.49
+132854.79
+132972.24
+133002.55
+133038.59
+133273.64
+133451.14
+133665.12
+133829.35
+133864.82
+134308.04
+134333.33
+134413.58
+134442.37
+134726.09
+134814.65
+135157.92
+135187.33
+135335.96
+135613.18
+135643.87
+135647.68
+135745.58
+135761.05
+135934.6
+136058.7
+136162.13
+136360.37
+136517.34
+136582.6
+136634.34
+136765.03
+136954.81
+137030.4
+137223.14
+137297.71
+137369.5
+137473.58
+137576.19
+138010.76
+138423.03
+138584.2
+138902.23
+139047.22
+139104.17
+139124.72
+139332.94
+139542.14
+139579.18
+139580.85
+139714.71
+139854.41
+139902.71
+139915.23
+140031.23
+140363.7
+140390.6
+140608.69
+140685.01
+140838.11
+141118.87
+141159.63
+141311.01
+141486.77
+141554.06
+141647.08
+141679.41
+141822.19
+141824.23
+141858.97
+141902.54
+142029.67
+142070.65
+142290.77
+142291.79
+142322.33
+142323.38
+142494.99
+142767.26
+142866.39
+142891.22
+143070.7
+143191.54
+143212.85
+143276.28
+143350.75
+143411.69
+143753.01
+143813.39
+143899.85
+144123.37
+144335.16
+145040.38
+145060.41
+145096.17
+145100.47
+145232.09
+145249.13
+145426.11
+145495.62
+145630.76
+145654.97
+145695.42
+145713.03
+145730.19
+145761.99
+145768.47
+145857.6
+145898.47
+145906.24
+145971.6
+146136.1
+146221.66
+146298.28
+146382.71
+146581.14
+146849.33
+146862.27
+146896.72
+146933.07
+147071.86
+147243.86
+147329.51
+147343.68
+147543.26
+147915.68
+148176.06
+148299.05
+148500.71
+148682.82
+148789.52
+149451.88
+149466.62
+149536.2
+149614.34
+149671.92
+150334.57
+150345.63
+150349.92
+150582.77
+150585.73
+150655.44
+150886.49
+151089.96
+151148.81
+151233.65
+151282.65
+151404.78
+151419.5
+151515.08
+151801.06
+152940.0
+153024.28
+153048.74
+153069.14
+153233.93
+153259.41
+153386.61
+153426.79
+153568.02
+153637.79
+153720.22
+153864.67
+154260.84
+154383.37
+154590.05
+154653.32
+154936.43
+154958.89
+154967.89
+155017.92
+155045.39
+155356.8
+155680.6
+156018.74
+156345.64
+156381.95
+156407.4
+156477.94
+156697.55
+156802.8
+157040.57
+157062.7
+157767.86
+157968.27
+158345.31
+158479.37
+158776.68
+158853.63
+158885.83
+158893.16
+158991.89
+159005.35
+159015.39
+159170.8
+159171.69
+159578.94
+159720.39
+159870.44
+160627.01
+160882.76
+161066.22
+161307.05
+161625.5
+161745.44
+162088.3
+162113.46
+162165.94
+162176.23
+162634.53
+162786.67
+163709.85
+163746.47
+163794.53
+163834.46
+163966.67
+164462.61
+165019.32
+165219.08
+165454.51
+165489.52
+165655.99
+165890.47
+166335.03
+166506.22
+166669.86
+166947.75
+166961.06
+167017.39
+167056.34
+167262.34
+168495.03
+168562.27
+168618.39
+168721.45
+168750.48
+168952.1
+169107.85
+169756.19
+169797.4
+169847.63
+170360.27
+171128.1
+171326.48
+171488.73
+171522.54
+171894.45
+171975.62
+172021.87
+172102.96
+172436.3
+172861.58
+172872.37
+172899.84
+172908.01
+173024.71
+173130.2
+173145.37
+173340.09
+173444.6
+173522.71
+174090.3
+174223.2
+174569.88
+174634.12
+174798.97
+175017.68
+175142.28
+175422.13
+176084.63
+176278.57
+176294.34
+176525.53
+176647.54
+176864.83
+176867.34
+176911.21
+177181.67
+177458.97
+178060.22
+178249.05
+178254.66
+178491.24
+178492.01
+178821.73
+179287.95
+179292.14
+179418.31
+179462.21
+179554.41
+179686.07
+179747.47
+179827.12
+179854.51
+179923.54
+179984.42
+180054.29
+180119.22
+180396.95
+180417.11
+180455.98
+180478.16
+180692.9
+180737.75
+180912.15
+181077.36
+181320.5
+181346.56
+182025.95
+182432.17
+182516.77
+182966.39
+183104.71
+183176.6
+183286.33
+183493.42
+183620.33
+183671.08
+183734.56
+183965.61
+184172.31
+184583.99
+185496.66
+185968.15
+186215.81
+186370.23
+186669.1
+186912.51
+187156.38
+187514.11
+187516.29
+187553.35
+187932.3
+188124.55
+188985.18
+189361.42
+189547.57
+189651.76
+190142.17
+190490.78
+190652.53
+190693.92
+190960.69
+192074.23
+192217.86
+192417.85
+193832.28
+193857.67
+194119.31
+194159.59
+195515.26
+195834.96
+195844.84
+196080.26
+196443.16
+196989.09
+197031.52
+197192.95
+197973.22
+198039.23
+198238.65
+198322.91
+198360.22
+198538.68
+198723.3
+198816.13
+198995.21
+199102.23
+199593.71
+201003.12
+201268.06
+201419.83
+201455.98
+202227.17
+202364.58
+202623.92
+202660.52
+202784.54
+202917.72
+203904.8
+204163.1
+204355.65
+204438.57
+205282.63
+206179.68
+206495.43
+206742.11
+207119.83
+207291.83
+207364.8
+207632.55
+207907.6
+207925.83
+208481.57
+208517.98
+208892.63
+208974.42
+209155.48
+210113.88
+210643.96
+210713.88
+211925.95
+212339.55
+214494.39
+215135.72
+215342.63
+216071.76
+216104.85
+216826.73
+217682.81
+217709.03
+218116.21
+218482.7
+218697.85
+219123.27
+219707.84
+219709.6
+219920.62
+219959.08
+220636.82
+220715.14
+220727.97
+221036.31
+221320.76
+221636.83
+222274.54
+222392.53
+222653.54
+223507.72
+223537.09
+223995.46
+224382.57
+224674.27
+224724.11
+226314.91
+226806.66
+228002.51
+228054.01
+228136.49
+229165.17
+230949.45
+231012.22
+231831.35
+232194.74
+234763.73
+237947.61
+240284.95
+240457.56
+241837.88
+242588.87
+244704.23
+245388.06
+245976.74
+249900.42
+258779.02
+263411.29
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u10_nestedloop_join.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u10_nestedloop_join.result
new file mode 100644
index 0000000..ffc76a4
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u10_nestedloop_join.result
@@ -0,0 +1,300 @@
+ARGENTINAALGERIA10
+BRAZILALGERIA20
+BRAZILARGENTINA21
+CANADAALGERIA30
+CANADAARGENTINA31
+CANADABRAZIL32
+CHINAALGERIA180
+CHINAARGENTINA181
+CHINABRAZIL182
+CHINACANADA183
+CHINAEGYPT184
+CHINAETHIOPIA185
+CHINAFRANCE186
+CHINAGERMANY187
+CHINAINDIA188
+CHINAINDONESIA189
+CHINAIRAN1810
+CHINAIRAQ1811
+CHINAJAPAN1812
+CHINAJORDAN1813
+CHINAKENYA1814
+CHINAMOROCCO1815
+CHINAMOZAMBIQUE1816
+CHINAPERU1817
+EGYPTALGERIA40
+EGYPTARGENTINA41
+EGYPTBRAZIL42
+EGYPTCANADA43
+ETHIOPIAALGERIA50
+ETHIOPIAARGENTINA51
+ETHIOPIABRAZIL52
+ETHIOPIACANADA53
+ETHIOPIAEGYPT54
+FRANCEALGERIA60
+FRANCEARGENTINA61
+FRANCEBRAZIL62
+FRANCECANADA63
+FRANCEEGYPT64
+FRANCEETHIOPIA65
+GERMANYALGERIA70
+GERMANYARGENTINA71
+GERMANYBRAZIL72
+GERMANYCANADA73
+GERMANYEGYPT74
+GERMANYETHIOPIA75
+GERMANYFRANCE76
+INDIAALGERIA80
+INDIAARGENTINA81
+INDIABRAZIL82
+INDIACANADA83
+INDIAEGYPT84
+INDIAETHIOPIA85
+INDIAFRANCE86
+INDIAGERMANY87
+INDONESIAALGERIA90
+INDONESIAARGENTINA91
+INDONESIABRAZIL92
+INDONESIACANADA93
+INDONESIAEGYPT94
+INDONESIAETHIOPIA95
+INDONESIAFRANCE96
+INDONESIAGERMANY97
+INDONESIAINDIA98
+IRANALGERIA100
+IRANARGENTINA101
+IRANBRAZIL102
+IRANCANADA103
+IRANEGYPT104
+IRANETHIOPIA105
+IRANFRANCE106
+IRANGERMANY107
+IRANINDIA108
+IRANINDONESIA109
+IRAQALGERIA110
+IRAQARGENTINA111
+IRAQBRAZIL112
+IRAQCANADA113
+IRAQEGYPT114
+IRAQETHIOPIA115
+IRAQFRANCE116
+IRAQGERMANY117
+IRAQINDIA118
+IRAQINDONESIA119
+IRAQIRAN1110
+JAPANALGERIA120
+JAPANARGENTINA121
+JAPANBRAZIL122
+JAPANCANADA123
+JAPANEGYPT124
+JAPANETHIOPIA125
+JAPANFRANCE126
+JAPANGERMANY127
+JAPANINDIA128
+JAPANINDONESIA129
+JAPANIRAN1210
+JAPANIRAQ1211
+JORDANALGERIA130
+JORDANARGENTINA131
+JORDANBRAZIL132
+JORDANCANADA133
+JORDANEGYPT134
+JORDANETHIOPIA135
+JORDANFRANCE136
+JORDANGERMANY137
+JORDANINDIA138
+JORDANINDONESIA139
+JORDANIRAN1310
+JORDANIRAQ1311
+JORDANJAPAN1312
+KENYAALGERIA140
+KENYAARGENTINA141
+KENYABRAZIL142
+KENYACANADA143
+KENYAEGYPT144
+KENYAETHIOPIA145
+KENYAFRANCE146
+KENYAGERMANY147
+KENYAINDIA148
+KENYAINDONESIA149
+KENYAIRAN1410
+KENYAIRAQ1411
+KENYAJAPAN1412
+KENYAJORDAN1413
+MOROCCOALGERIA150
+MOROCCOARGENTINA151
+MOROCCOBRAZIL152
+MOROCCOCANADA153
+MOROCCOEGYPT154
+MOROCCOETHIOPIA155
+MOROCCOFRANCE156
+MOROCCOGERMANY157
+MOROCCOINDIA158
+MOROCCOINDONESIA159
+MOROCCOIRAN1510
+MOROCCOIRAQ1511
+MOROCCOJAPAN1512
+MOROCCOJORDAN1513
+MOROCCOKENYA1514
+MOZAMBIQUEALGERIA160
+MOZAMBIQUEARGENTINA161
+MOZAMBIQUEBRAZIL162
+MOZAMBIQUECANADA163
+MOZAMBIQUEEGYPT164
+MOZAMBIQUEETHIOPIA165
+MOZAMBIQUEFRANCE166
+MOZAMBIQUEGERMANY167
+MOZAMBIQUEINDIA168
+MOZAMBIQUEINDONESIA169
+MOZAMBIQUEIRAN1610
+MOZAMBIQUEIRAQ1611
+MOZAMBIQUEJAPAN1612
+MOZAMBIQUEJORDAN1613
+MOZAMBIQUEKENYA1614
+MOZAMBIQUEMOROCCO1615
+PERUALGERIA170
+PERUARGENTINA171
+PERUBRAZIL172
+PERUCANADA173
+PERUEGYPT174
+PERUETHIOPIA175
+PERUFRANCE176
+PERUGERMANY177
+PERUINDIA178
+PERUINDONESIA179
+PERUIRAN1710
+PERUIRAQ1711
+PERUJAPAN1712
+PERUJORDAN1713
+PERUKENYA1714
+PERUMOROCCO1715
+PERUMOZAMBIQUE1716
+ROMANIAALGERIA190
+ROMANIAARGENTINA191
+ROMANIABRAZIL192
+ROMANIACANADA193
+ROMANIACHINA1918
+ROMANIAEGYPT194
+ROMANIAETHIOPIA195
+ROMANIAFRANCE196
+ROMANIAGERMANY197
+ROMANIAINDIA198
+ROMANIAINDONESIA199
+ROMANIAIRAN1910
+ROMANIAIRAQ1911
+ROMANIAJAPAN1912
+ROMANIAJORDAN1913
+ROMANIAKENYA1914
+ROMANIAMOROCCO1915
+ROMANIAMOZAMBIQUE1916
+ROMANIAPERU1917
+RUSSIAALGERIA220
+RUSSIAARGENTINA221
+RUSSIABRAZIL222
+RUSSIACANADA223
+RUSSIACHINA2218
+RUSSIAEGYPT224
+RUSSIAETHIOPIA225
+RUSSIAFRANCE226
+RUSSIAGERMANY227
+RUSSIAINDIA228
+RUSSIAINDONESIA229
+RUSSIAIRAN2210
+RUSSIAIRAQ2211
+RUSSIAJAPAN2212
+RUSSIAJORDAN2213
+RUSSIAKENYA2214
+RUSSIAMOROCCO2215
+RUSSIAMOZAMBIQUE2216
+RUSSIAPERU2217
+RUSSIAROMANIA2219
+RUSSIASAUDI ARABIA2220
+RUSSIAVIETNAM2221
+SAUDI ARABIAALGERIA200
+SAUDI ARABIAARGENTINA201
+SAUDI ARABIABRAZIL202
+SAUDI ARABIACANADA203
+SAUDI ARABIACHINA2018
+SAUDI ARABIAEGYPT204
+SAUDI ARABIAETHIOPIA205
+SAUDI ARABIAFRANCE206
+SAUDI ARABIAGERMANY207
+SAUDI ARABIAINDIA208
+SAUDI ARABIAINDONESIA209
+SAUDI ARABIAIRAN2010
+SAUDI ARABIAIRAQ2011
+SAUDI ARABIAJAPAN2012
+SAUDI ARABIAJORDAN2013
+SAUDI ARABIAKENYA2014
+SAUDI ARABIAMOROCCO2015
+SAUDI ARABIAMOZAMBIQUE2016
+SAUDI ARABIAPERU2017
+SAUDI ARABIAROMANIA2019
+UNITED KINGDOMALGERIA230
+UNITED KINGDOMARGENTINA231
+UNITED KINGDOMBRAZIL232
+UNITED KINGDOMCANADA233
+UNITED KINGDOMCHINA2318
+UNITED KINGDOMEGYPT234
+UNITED KINGDOMETHIOPIA235
+UNITED KINGDOMFRANCE236
+UNITED KINGDOMGERMANY237
+UNITED KINGDOMINDIA238
+UNITED KINGDOMINDONESIA239
+UNITED KINGDOMIRAN2310
+UNITED KINGDOMIRAQ2311
+UNITED KINGDOMJAPAN2312
+UNITED KINGDOMJORDAN2313
+UNITED KINGDOMKENYA2314
+UNITED KINGDOMMOROCCO2315
+UNITED KINGDOMMOZAMBIQUE2316
+UNITED KINGDOMPERU2317
+UNITED KINGDOMROMANIA2319
+UNITED KINGDOMRUSSIA2322
+UNITED KINGDOMSAUDI ARABIA2320
+UNITED KINGDOMVIETNAM2321
+UNITED STATESALGERIA240
+UNITED STATESARGENTINA241
+UNITED STATESBRAZIL242
+UNITED STATESCANADA243
+UNITED STATESCHINA2418
+UNITED STATESEGYPT244
+UNITED STATESETHIOPIA245
+UNITED STATESFRANCE246
+UNITED STATESGERMANY247
+UNITED STATESINDIA248
+UNITED STATESINDONESIA249
+UNITED STATESIRAN2410
+UNITED STATESIRAQ2411
+UNITED STATESJAPAN2412
+UNITED STATESJORDAN2413
+UNITED STATESKENYA2414
+UNITED STATESMOROCCO2415
+UNITED STATESMOZAMBIQUE2416
+UNITED STATESPERU2417
+UNITED STATESROMANIA2419
+UNITED STATESRUSSIA2422
+UNITED STATESSAUDI ARABIA2420
+UNITED STATESUNITED KINGDOM2423
+UNITED STATESVIETNAM2421
+VIETNAMALGERIA210
+VIETNAMARGENTINA211
+VIETNAMBRAZIL212
+VIETNAMCANADA213
+VIETNAMCHINA2118
+VIETNAMEGYPT214
+VIETNAMETHIOPIA215
+VIETNAMFRANCE216
+VIETNAMGERMANY217
+VIETNAMINDIA218
+VIETNAMINDONESIA219
+VIETNAMIRAN2110
+VIETNAMIRAQ2111
+VIETNAMJAPAN2112
+VIETNAMJORDAN2113
+VIETNAMKENYA2114
+VIETNAMMOROCCO2115
+VIETNAMMOZAMBIQUE2116
+VIETNAMPERU2117
+VIETNAMROMANIA2119
+VIETNAMSAUDI ARABIA2120
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u1_gby.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u1_gby.result
new file mode 100644
index 0000000..7efa15a
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u1_gby.result
@@ -0,0 +1,5 @@
+05
+15
+25
+35
+45
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u2_gby_external.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u2_gby_external.result
new file mode 100644
index 0000000..b30110b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u2_gby_external.result
@@ -0,0 +1,10 @@
+598841.0
+598725.25
+598617.8
+59854.0
+598420.25
+595928.857142857142858
+595829.6
+595734.57142857142857
+595630.0
+595523.0
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u3_union.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u3_union.result
new file mode 100644
index 0000000..e8ec7a2
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u3_union.result
@@ -0,0 +1,9 @@
+2 N kD4on9OM Ipw3,gf0JBoQDd7tgrzrddZ17Supplier#000000001
+181KhUgZegwM3ua7dsYmekYBsK10Supplier#000000009
+489eJ5ksX3ImxJQBvxObC,5Supplier#000000002
+169Sq4bBH2FQEmaFOocY45sRTxo6yuoG17Supplier#000000008
+8Bk7ah4CK8SYQTepEmvMkkgMwg15Supplier#000000004
+10Gcdm2rJRzl5qlTVzc11Supplier#000000005
+6q1,G3Pj6OjIuUYfUoH18BFTKP5aU9bEV31Supplier#000000003
+14s,4TicNGB4uO6PaSqNBUq23Supplier#000000007
+12tQxuVm7s7CnK14Supplier#000000006
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u4_gby_distinct.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u4_gby_distinct.result
new file mode 100644
index 0000000..b30110b
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u4_gby_distinct.result
@@ -0,0 +1,10 @@
+598841.0
+598725.25
+598617.8
+59854.0
+598420.25
+595928.857142857142858
+595829.6
+595734.57142857142857
+595630.0
+595523.0
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u5_gby_global.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u5_gby_global.result
new file mode 100644
index 0000000..8828088
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u5_gby_global.result
@@ -0,0 +1 @@
+152398.0
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u6_large_card_join.result b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u6_large_card_join.result
new file mode 100644
index 0000000..82b4857
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/results/u6_large_card_join.result
@@ -0,0 +1,512 @@
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
+22
diff --git a/hivesterix/hivesterix-optimizer/pom.xml b/hivesterix/hivesterix-optimizer/pom.xml
new file mode 100644
index 0000000..4e6032e
--- /dev/null
+++ b/hivesterix/hivesterix-optimizer/pom.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0"?>
+<project
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+	xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<artifactId>hivesterix</artifactId>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+	<artifactId>hivesterix-optimizer</artifactId>
+	<name>hivesterix-optimizer</name>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+					<encoding>UTF-8</encoding>
+					<fork>true</fork>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<dependencies>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hivesterix-common</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hivesterix-translator</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rulecollections/HiveRuleCollections.java b/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rulecollections/HiveRuleCollections.java
new file mode 100644
index 0000000..7e4e271
--- /dev/null
+++ b/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rulecollections/HiveRuleCollections.java
@@ -0,0 +1,113 @@
+package edu.uci.ics.hivesterix.optimizer.rulecollections;
+
+import java.util.LinkedList;
+
+import edu.uci.ics.hivesterix.optimizer.rules.InsertProjectBeforeWriteRule;
+import edu.uci.ics.hivesterix.optimizer.rules.IntroduceEarlyProjectRule;
+import edu.uci.ics.hivesterix.optimizer.rules.LocalGroupByRule;
+import edu.uci.ics.hivesterix.optimizer.rules.RemoveRedundantSelectRule;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.HeuristicOptimizer;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.BreakSelectIntoConjunctsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ComplexJoinInferenceRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ConsolidateAssignsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ConsolidateSelectsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.EliminateSubplanRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.EnforceStructuralPropertiesRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ExtractCommonOperatorsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ExtractGbyExpressionsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.FactorRedundantGroupAndDecorVarsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.InferTypesRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.InlineVariablesRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.InsertProjectBeforeUnionRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceAggregateCombinerRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceGroupByCombinerRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.IsolateHyracksOperatorsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PullSelectOutOfEqJoin;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushLimitDownRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushProjectDownRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushProjectIntoDataSourceScanRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushSelectDownRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushSelectIntoJoinRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ReinferAllTypesRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.RemoveRedundantProjectionRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.RemoveUnusedAssignAndAggregateRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.SetAlgebricksPhysicalOperatorsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.SetExecutionModeRule;
+
+public final class HiveRuleCollections {
+
+    public final static LinkedList<IAlgebraicRewriteRule> NORMALIZATION = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        NORMALIZATION.add(new EliminateSubplanRule());
+        NORMALIZATION.add(new IntroduceAggregateCombinerRule());
+        NORMALIZATION.add(new BreakSelectIntoConjunctsRule());
+        NORMALIZATION.add(new IntroduceAggregateCombinerRule());
+        NORMALIZATION.add(new PushSelectIntoJoinRule());
+        NORMALIZATION.add(new ExtractGbyExpressionsRule());
+        NORMALIZATION.add(new RemoveRedundantSelectRule());
+    }
+
+    public final static LinkedList<IAlgebraicRewriteRule> COND_PUSHDOWN_AND_JOIN_INFERENCE = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        COND_PUSHDOWN_AND_JOIN_INFERENCE.add(new PushSelectDownRule());
+        COND_PUSHDOWN_AND_JOIN_INFERENCE.add(new InlineVariablesRule());
+        COND_PUSHDOWN_AND_JOIN_INFERENCE.add(new FactorRedundantGroupAndDecorVarsRule());
+        COND_PUSHDOWN_AND_JOIN_INFERENCE.add(new EliminateSubplanRule());
+    }
+
+    public final static LinkedList<IAlgebraicRewriteRule> LOAD_FIELDS = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        // should LoadRecordFieldsRule be applied in only one pass over the
+        // plan?
+        LOAD_FIELDS.add(new InlineVariablesRule());
+        // LOAD_FIELDS.add(new RemoveUnusedAssignAndAggregateRule());
+        LOAD_FIELDS.add(new ComplexJoinInferenceRule());
+        LOAD_FIELDS.add(new InferTypesRule());
+    }
+
+    public final static LinkedList<IAlgebraicRewriteRule> OP_PUSHDOWN = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        OP_PUSHDOWN.add(new PushProjectDownRule());
+        OP_PUSHDOWN.add(new PushSelectDownRule());
+    }
+
+    public final static LinkedList<IAlgebraicRewriteRule> DATA_EXCHANGE = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        DATA_EXCHANGE.add(new SetExecutionModeRule());
+    }
+
+    public final static LinkedList<IAlgebraicRewriteRule> CONSOLIDATION = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        CONSOLIDATION.add(new RemoveRedundantProjectionRule());
+        CONSOLIDATION.add(new ConsolidateSelectsRule());
+        CONSOLIDATION.add(new IntroduceEarlyProjectRule());
+        CONSOLIDATION.add(new ConsolidateAssignsRule());
+        CONSOLIDATION.add(new IntroduceGroupByCombinerRule());
+        CONSOLIDATION.add(new RemoveUnusedAssignAndAggregateRule());
+    }
+
+    public final static LinkedList<IAlgebraicRewriteRule> PHYSICAL_PLAN_REWRITES = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        PHYSICAL_PLAN_REWRITES.add(new PullSelectOutOfEqJoin());
+        PHYSICAL_PLAN_REWRITES.add(new SetAlgebricksPhysicalOperatorsRule());
+        PHYSICAL_PLAN_REWRITES.add(new EnforceStructuralPropertiesRule());
+        PHYSICAL_PLAN_REWRITES.add(new PushProjectDownRule());
+        PHYSICAL_PLAN_REWRITES.add(new SetAlgebricksPhysicalOperatorsRule());
+        PHYSICAL_PLAN_REWRITES.add(new PushLimitDownRule());
+        PHYSICAL_PLAN_REWRITES.add(new InsertProjectBeforeWriteRule());
+        PHYSICAL_PLAN_REWRITES.add(new InsertProjectBeforeUnionRule());
+    }
+
+    public final static LinkedList<IAlgebraicRewriteRule> prepareJobGenRules = new LinkedList<IAlgebraicRewriteRule>();
+    static {
+        prepareJobGenRules.add(new ReinferAllTypesRule());
+        prepareJobGenRules.add(new IsolateHyracksOperatorsRule(
+                HeuristicOptimizer.hyraxOperatorsBelowWhichJobGenIsDisabled));
+        prepareJobGenRules.add(new ExtractCommonOperatorsRule());
+        prepareJobGenRules.add(new LocalGroupByRule());
+        prepareJobGenRules.add(new PushProjectIntoDataSourceScanRule());
+        prepareJobGenRules.add(new ReinferAllTypesRule());
+    }
+
+}
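Each list above is one rewrite phase: a heuristic optimizer applies a phase's rules repeatedly until a full pass changes nothing, then moves to the next phase. The sketch below models that fixpoint loop with a stand-in Rule interface; the real IAlgebraicRewriteRule and HeuristicOptimizer signatures are not part of this patch, so the names here are illustrative only.

import java.util.List;

// Stand-in for IAlgebraicRewriteRule: returns true if it rewrote the plan.
interface Rule<P> {
    boolean rewrite(P plan);
}

final class FixpointOptimizer {
    // Run each phase to a fixpoint before starting the next one.
    static <P> void optimize(P plan, List<List<Rule<P>>> phases) {
        for (List<Rule<P>> phase : phases) {
            boolean changed;
            do {
                changed = false;
                for (Rule<P> rule : phase) {
                    changed |= rule.rewrite(plan);
                }
            } while (changed);
        }
    }
}

Running a phase to a fixpoint matters because one rule can expose opportunities for another, e.g. BreakSelectIntoConjunctsRule creating selects that PushSelectIntoJoinRule can then move.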
diff --git a/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/InsertProjectBeforeWriteRule.java b/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/InsertProjectBeforeWriteRule.java
new file mode 100644
index 0000000..90777ee
--- /dev/null
+++ b/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/InsertProjectBeforeWriteRule.java
@@ -0,0 +1,79 @@
+package edu.uci.ics.hivesterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.StreamProjectPOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class InsertProjectBeforeWriteRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
+
+    /**
+     * When the input schema to WriteOperator is different from the output
+     * schema in terms of variable order, add a project operator to get the
+     * write order
+     */
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.WRITE) {
+            return false;
+        }
+        WriteOperator opWrite = (WriteOperator) op;
+        ArrayList<LogicalVariable> finalSchema = new ArrayList<LogicalVariable>();
+        VariableUtilities.getUsedVariables(opWrite, finalSchema);
+        ArrayList<LogicalVariable> inputSchema = new ArrayList<LogicalVariable>();
+        VariableUtilities.getLiveVariables(opWrite, inputSchema);
+        if (!isIdentical(finalSchema, inputSchema)) {
+            ProjectOperator projectOp = new ProjectOperator(finalSchema);
+            Mutable<ILogicalOperator> parentOpRef = opWrite.getInputs().get(0);
+            projectOp.getInputs().add(parentOpRef);
+            opWrite.getInputs().clear();
+            opWrite.getInputs().add(new MutableObject<ILogicalOperator>(projectOp));
+            projectOp.setPhysicalOperator(new StreamProjectPOperator());
+            projectOp.setExecutionMode(ExecutionMode.PARTITIONED);
+
+            AbstractLogicalOperator op2 = (AbstractLogicalOperator) parentOpRef.getValue();
+            if (op2.getOperatorTag() == LogicalOperatorTag.PROJECT) {
+                ProjectOperator pi2 = (ProjectOperator) op2;
+                parentOpRef.setValue(pi2.getInputs().get(0).getValue());
+            }
+            context.computeAndSetTypeEnvironmentForOperator(projectOp);
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    private boolean isIdentical(List<LogicalVariable> finalSchema, List<LogicalVariable> inputSchema) {
+        int finalSchemaSize = finalSchema.size();
+        int inputSchemaSize = inputSchema.size();
+        if (finalSchemaSize != inputSchemaSize)
+            throw new IllegalStateException("final output schema variables missing!");
+        for (int i = 0; i < finalSchemaSize; i++) {
+            LogicalVariable var1 = finalSchema.get(i);
+            LogicalVariable var2 = inputSchema.get(i);
+            if (!var1.equals(var2))
+                return false;
+        }
+        return true;
+    }
+}
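The rule only fires when the write schema and the live input schema hold the same variables in a different order; a size mismatch is an error, and order decides the result. A standalone illustration of that check, using hypothetical String variable names in place of LogicalVariable:

import java.util.Arrays;
import java.util.List;

public class SchemaOrderCheck {
    // Mirrors isIdentical: size must match, element order decides the result.
    static boolean sameOrder(List<String> expected, List<String> actual) {
        if (expected.size() != actual.size())
            throw new IllegalStateException("final output schema variables missing!");
        return expected.equals(actual); // element-wise, order-sensitive comparison
    }

    public static void main(String[] args) {
        List<String> writeSchema = Arrays.asList("$1", "$2", "$3");
        List<String> liveSchema = Arrays.asList("$2", "$1", "$3");
        // false: same variables in the wrong order, so a project gets inserted
        System.out.println(sameOrder(writeSchema, liveSchema));
    }
}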
diff --git a/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/IntroduceEarlyProjectRule.java b/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/IntroduceEarlyProjectRule.java
new file mode 100644
index 0000000..0a18629
--- /dev/null
+++ b/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/IntroduceEarlyProjectRule.java
@@ -0,0 +1,73 @@
+package edu.uci.ics.hivesterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class IntroduceEarlyProjectRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.PROJECT) {
+            return false;
+        }
+        AbstractLogicalOperator middleOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
+        List<LogicalVariable> deliveredVars = new ArrayList<LogicalVariable>();
+        List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
+        List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
+
+        VariableUtilities.getUsedVariables(op, deliveredVars);
+        VariableUtilities.getUsedVariables(middleOp, usedVars);
+        VariableUtilities.getProducedVariables(middleOp, producedVars);
+
+        Set<LogicalVariable> requiredVariables = new HashSet<LogicalVariable>();
+        requiredVariables.addAll(deliveredVars);
+        requiredVariables.addAll(usedVars);
+        requiredVariables.removeAll(producedVars);
+
+        if (middleOp.getInputs().size() != 1)
+            return false;
+
+        AbstractLogicalOperator targetOp = (AbstractLogicalOperator) middleOp.getInputs().get(0).getValue();
+        if (targetOp.getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN)
+            return false;
+
+        Set<LogicalVariable> deliveredEarlyVars = new HashSet<LogicalVariable>();
+        VariableUtilities.getLiveVariables(targetOp, deliveredEarlyVars);
+
+        deliveredEarlyVars.removeAll(requiredVariables);
+        if (deliveredEarlyVars.size() > 0) {
+            ArrayList<LogicalVariable> requiredVars = new ArrayList<LogicalVariable>();
+            requiredVars.addAll(requiredVariables);
+            ILogicalOperator earlyProjectOp = new ProjectOperator(requiredVars);
+            Mutable<ILogicalOperator> earlyProjectOpRef = new MutableObject<ILogicalOperator>(earlyProjectOp);
+            Mutable<ILogicalOperator> targetRef = middleOp.getInputs().get(0);
+            middleOp.getInputs().set(0, earlyProjectOpRef);
+            earlyProjectOp.getInputs().add(targetRef);
+            context.computeAndSetTypeEnvironmentForOperator(earlyProjectOp);
+            return true;
+        }
+        return false;
+    }
+}
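The interesting step in this rule is the set arithmetic: what the scan must still deliver is (variables the project needs ∪ variables the middle operator uses) minus variables the middle operator produces itself; anything else can be projected away directly above the scan. A small standalone illustration with hypothetical variable names:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RequiredVars {
    public static void main(String[] args) {
        Set<String> delivered = new HashSet<String>(Arrays.asList("$a", "$b")); // needed by the project
        Set<String> used = new HashSet<String>(Arrays.asList("$b", "$c"));      // consumed by the middle op
        Set<String> produced = new HashSet<String>(Arrays.asList("$c"));        // created by the middle op

        Set<String> required = new HashSet<String>(delivered);
        required.addAll(used);        // union with what the middle op consumes
        required.removeAll(produced); // drop what the middle op creates itself
        System.out.println(required); // {$a, $b} (iteration order may vary)
    }
}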
diff --git a/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/LocalGroupByRule.java b/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/LocalGroupByRule.java
new file mode 100644
index 0000000..90ca008
--- /dev/null
+++ b/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/LocalGroupByRule.java
@@ -0,0 +1,66 @@
+package edu.uci.ics.hivesterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.hivesterix.logical.plan.HiveOperatorAnnotations;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IPhysicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.OperatorAnnotations;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.OneToOneExchangePOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class LocalGroupByRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.GROUP) {
+            return false;
+        }
+        Boolean localGby = (Boolean) op.getAnnotations().get(HiveOperatorAnnotations.LOCAL_GROUP_BY);
+        if (localGby != null && localGby.equals(Boolean.TRUE)) {
+            Boolean hashGby = (Boolean) op.getAnnotations().get(OperatorAnnotations.USE_HASH_GROUP_BY);
+            Boolean externalGby = (Boolean) op.getAnnotations().get(OperatorAnnotations.USE_EXTERNAL_GROUP_BY);
+            if ((hashGby != null && hashGby.equals(Boolean.TRUE))
+                    || (externalGby != null && externalGby.equals(Boolean.TRUE))) {
+                reviseExchange(op);
+            } else {
+                ILogicalOperator child = op.getInputs().get(0).getValue();
+                AbstractLogicalOperator childOp = (AbstractLogicalOperator) child;
+                while (child.getInputs().size() > 0) {
+                    if (childOp.getOperatorTag() == LogicalOperatorTag.ORDER)
+                        break;
+                    else {
+                        child = child.getInputs().get(0).getValue();
+                        childOp = (AbstractLogicalOperator) child;
+                    }
+                }
+                if (childOp.getOperatorTag() == LogicalOperatorTag.ORDER)
+                    reviseExchange(childOp);
+            }
+            return true;
+        }
+        return false;
+    }
+
+    private void reviseExchange(AbstractLogicalOperator op) {
+        ExchangeOperator exchange = (ExchangeOperator) op.getInputs().get(0).getValue();
+        IPhysicalOperator physicalOp = exchange.getPhysicalOperator();
+        if (physicalOp.getOperatorTag() == PhysicalOperatorTag.HASH_PARTITION_EXCHANGE) {
+            exchange.setPhysicalOperator(new OneToOneExchangePOperator());
+        }
+    }
+
+}
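When the group-by is hash-based or external, the exchange directly beneath it is revised; otherwise the rule walks down the single-input chain looking for the ORDER operator that feeds a pre-sorted group-by and revises the exchange beneath that instead, downgrading a hash-partition exchange to one-to-one so the aggregation stays partition-local. A stand-in model of that downward walk (the real rule traverses ILogicalOperator inputs the same way):

// Stand-in operator node with a single input, null at a leaf.
final class Op {
    final String tag; // e.g. "ASSIGN", "ORDER", "SCAN"
    final Op input;
    Op(String tag, Op input) { this.tag = tag; this.input = input; }
}

final class ChainWalk {
    // Descend until an ORDER operator or a leaf is reached.
    static Op findOrder(Op start) {
        Op cur = start;
        while (cur != null && !"ORDER".equals(cur.tag)) {
            cur = cur.input;
        }
        return cur; // null if no ORDER exists below
    }

    public static void main(String[] args) {
        Op chain = new Op("ASSIGN", new Op("ORDER", new Op("SCAN", null)));
        System.out.println(findOrder(chain).tag); // ORDER
    }
}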
diff --git a/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/RemoveRedundantSelectRule.java b/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/RemoveRedundantSelectRule.java
new file mode 100644
index 0000000..44ff12d
--- /dev/null
+++ b/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rules/RemoveRedundantSelectRule.java
@@ -0,0 +1,44 @@
+package edu.uci.ics.hivesterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class RemoveRedundantSelectRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
+            return false;
+        }
+        AbstractLogicalOperator inputOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
+        if (inputOp.getOperatorTag() != LogicalOperatorTag.SELECT) {
+            return false;
+        }
+        SelectOperator selectOp = (SelectOperator) op;
+        SelectOperator inputSelectOp = (SelectOperator) inputOp;
+        ILogicalExpression expr1 = selectOp.getCondition().getValue();
+        ILogicalExpression expr2 = inputSelectOp.getCondition().getValue();
+
+        if (expr1.equals(expr2)) {
+            selectOp.getInputs().set(0, inputSelectOp.getInputs().get(0));
+            return true;
+        }
+        return false;
+    }
+
+}
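Filtering twice by the same predicate is a no-op, so the rule splices the duplicate select out by repointing the parent's input slot past it rather than rebuilding the plan. The same splice, demonstrated on a flat list of hypothetical operator labels:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class DedupAdjacentFilters {
    public static void main(String[] args) {
        List<String> chain = new ArrayList<String>(Arrays.asList("SELECT(x>3)", "SELECT(x>3)", "SCAN"));
        for (int i = 0; i + 1 < chain.size(); i++) {
            if (chain.get(i).equals(chain.get(i + 1))) {
                chain.remove(i + 1); // splice past the redundant operator
                i--;                 // re-check in case of longer runs
            }
        }
        System.out.println(chain); // [SELECT(x>3), SCAN]
    }
}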
diff --git a/hivesterix/hivesterix-runtime/pom.xml b/hivesterix/hivesterix-runtime/pom.xml
new file mode 100644
index 0000000..24e8c11
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/pom.xml
@@ -0,0 +1,371 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<groupId>edu.uci.ics.hyracks</groupId>
+	<artifactId>hivesterix-runtime</artifactId>
+	<version>0.2.3-SNAPSHOT</version>
+	<name>hivesterix-runtime</name>
+
+	<dependencies>
+		<dependency>
+			<groupId>javax.servlet</groupId>
+			<artifactId>servlet-api</artifactId>
+			<version>2.5</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.8.1</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>args4j</groupId>
+			<artifactId>args4j</artifactId>
+			<version>2.0.12</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.json</groupId>
+			<artifactId>json</artifactId>
+			<version>20090211</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.eclipse.jetty</groupId>
+			<artifactId>jetty-server</artifactId>
+			<version>8.0.0.M1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.eclipse.jetty</groupId>
+			<artifactId>jetty-servlet</artifactId>
+			<version>8.0.0.M1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>jline</groupId>
+			<artifactId>jline</artifactId>
+			<version>0.9.94</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.datanucleus</groupId>
+			<artifactId>datanucleus-core</artifactId>
+			<version>2.0.3</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.datanucleus</groupId>
+			<artifactId>datanucleus-connectionpool</artifactId>
+			<version>2.0.3</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.datanucleus</groupId>
+			<artifactId>datanucleus-enhancer</artifactId>
+			<version>2.0.3</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.datanucleus</groupId>
+			<artifactId>datanucleus-rdbms</artifactId>
+			<version>2.0.3</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-dbcp</groupId>
+			<artifactId>commons-dbcp</artifactId>
+			<version>1.4</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-pool</groupId>
+			<artifactId>commons-pool</artifactId>
+			<version>1.5.4</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-collections</groupId>
+			<artifactId>commons-collections</artifactId>
+			<version>3.2.1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-lang</groupId>
+			<artifactId>commons-lang</artifactId>
+			<version>2.4</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>javax</groupId>
+			<artifactId>jdo2-api</artifactId>
+			<version>2.3-ec</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>com.facebook</groupId>
+			<artifactId>libfb303</artifactId>
+			<version>0.5.0</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.thrift</groupId>
+			<artifactId>libthrift</artifactId>
+			<version>0.5.0</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.commons</groupId>
+			<artifactId>cli</artifactId>
+			<version>1.2</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache</groupId>
+			<artifactId>log4j</artifactId>
+			<version>1.2.15</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.antlr</groupId>
+			<artifactId>antlr-runtime</artifactId>
+			<version>3.0.1</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-cli</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-common</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-exec</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-hwi</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-jdbc</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-metastore</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-service</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-shims</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-serde</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-api</artifactId>
+			<version>1.6.1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-cli</groupId>
+			<artifactId>commons-cli</artifactId>
+			<version>1.2</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-log4j12</artifactId>
+			<version>1.6.1</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>commons-logging</groupId>
+			<artifactId>commons-logging</artifactId>
+			<version>1.1.1</version>
+			<type>jar</type>
+			<classifier>api</classifier>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>com.google.guava</groupId>
+			<artifactId>guava</artifactId>
+			<version>r06</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.antlr</groupId>
+			<artifactId>stringtemplate</artifactId>
+			<version>3.2</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.derby</groupId>
+			<artifactId>derby</artifactId>
+			<version>10.8.1.2</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hbase</groupId>
+			<artifactId>hbase</artifactId>
+			<version>0.90.3</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>algebricks-compiler</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-cc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-nc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hivesterix-serde</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hivesterix-common</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+					<encoding>UTF-8</encoding>
+					<fork>true</fork>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-jar-plugin</artifactId>
+				<executions>
+					<execution>
+						<id>patch</id>
+						<goals>
+							<goal>jar</goal>
+						</goals>
+						<phase>package</phase>
+						<configuration>
+							<classifier>patch</classifier>
+							<finalName>a-hive-runtime</finalName>
+							<includes>
+								<include>**/org/apache/**</include>
+							</includes>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+	<repositories>
+		<repository>
+			<releases>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>warn</checksumPolicy>
+			</releases>
+			<snapshots>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>fail</checksumPolicy>
+			</snapshots>
+			<id>third-party</id>
+			<url>http://obelix.ics.uci.edu/nexus/content/repositories/third-party</url>
+		</repository>
+		<repository>
+			<releases>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>warn</checksumPolicy>
+			</releases>
+			<snapshots>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>fail</checksumPolicy>
+			</snapshots>
+			<id>hyracks-public-release</id>
+			<url>http://obelix.ics.uci.edu/nexus/content/repositories/hyracks-public-releases</url>
+		</repository>
+	</repositories>
+</project>
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AbstractExpressionEvaluator.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AbstractExpressionEvaluator.java
new file mode 100644
index 0000000..ad02239
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AbstractExpressionEvaluator.java
@@ -0,0 +1,169 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.io.BytesWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyFactory;
+import edu.uci.ics.hivesterix.serde.lazy.LazyObject;
+import edu.uci.ics.hivesterix.serde.lazy.LazySerDe;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public abstract class AbstractExpressionEvaluator implements ICopyEvaluator {
+
+    private List<ICopyEvaluator> children;
+
+    private ExprNodeEvaluator evaluator;
+
+    private IDataOutputProvider out;
+
+    private ObjectInspector inspector;
+
+    /**
+     * output object inspector
+     */
+    private ObjectInspector outputInspector;
+
+    /**
+     * cached row object
+     */
+    private LazyObject<? extends ObjectInspector> cachedRowObject;
+
+    /**
+     * serializer/deserializer for lazy object
+     */
+    private SerDe lazySer;
+
+    /**
+     * data output
+     */
+    DataOutput dataOutput;
+
+    public AbstractExpressionEvaluator(ExprNodeEvaluator hiveEvaluator, ObjectInspector oi, IDataOutputProvider output)
+            throws AlgebricksException {
+        evaluator = hiveEvaluator;
+        out = output;
+        inspector = oi;
+        dataOutput = out.getDataOutput();
+    }
+
+    protected ObjectInspector getRowInspector() {
+        return null;
+    }
+
+    protected IDataOutputProvider getIDataOutputProvider() {
+        return out;
+    }
+
+    protected ExprNodeEvaluator getHiveEvaluator() {
+        return evaluator;
+    }
+
+    public ObjectInspector getObjectInspector() {
+        return inspector;
+    }
+
+    @Override
+    public void evaluate(IFrameTupleReference r) throws AlgebricksException {
+        // initialize hive evaluator
+        try {
+            if (outputInspector == null)
+                outputInspector = evaluator.initialize(inspector);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new AlgebricksException(e.getMessage());
+        }
+
+        readIntoCache(r);
+        try {
+            Object result = evaluator.evaluate(cachedRowObject);
+
+            // if (result == null) {
+            // result = evaluator.evaluate(cachedRowObject);
+            //
+            // // check if result is null
+            //
+            // String errorMsg = "serialize null object in  \n output " +
+            // outputInspector.toString() + " \n input "
+            // + inspector.toString() + "\n ";
+            // errorMsg += "";
+            // List<Object> columns = ((StructObjectInspector)
+            // inspector).getStructFieldsDataAsList(cachedRowObject);
+            // for (Object column : columns) {
+            // errorMsg += column.toString() + " ";
+            // }
+            // errorMsg += "\n";
+            // Log.info(errorMsg);
+            // System.out.println(errorMsg);
+            // // result = new BooleanWritable(true);
+            // throw new IllegalStateException(errorMsg);
+            // }
+
+            serializeResult(result);
+        } catch (HiveException e) {
+            e.printStackTrace();
+            throw new AlgebricksException(e.getMessage());
+        } catch (IOException e) {
+            e.printStackTrace();
+            throw new AlgebricksException(e.getMessage());
+        }
+    }
+
+    /**
+     * serialize the result
+     *
+     * @param result
+     *            the evaluation result
+     * @throws IOException
+     * @throws AlgebricksException
+     */
+    private void serializeResult(Object result) throws IOException, AlgebricksException {
+        if (lazySer == null)
+            lazySer = new LazySerDe();
+
+        try {
+            BytesWritable outputWritable = (BytesWritable) lazySer.serialize(result, outputInspector);
+            dataOutput.write(outputWritable.getBytes(), 0, outputWritable.getLength());
+        } catch (SerDeException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+
+    /**
+     * bind the tuple reference to the cached row object
+     *
+     * @param r
+     */
+    private void readIntoCache(IFrameTupleReference r) {
+        if (cachedRowObject == null)
+            cachedRowObject = (LazyObject<? extends ObjectInspector>) LazyFactory.createLazyObject(inspector);
+        cachedRowObject.init(r);
+    }
+
+    /**
+     * set a list of children of this evaluator
+     *
+     * @param children
+     */
+    public void setChildren(List<ICopyEvaluator> children) {
+        this.children = children;
+    }
+
+    public void addChild(ICopyEvaluator child) {
+        if (children == null)
+            children = new ArrayList<ICopyEvaluator>();
+        children.add(child);
+    }
+
+}
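The evaluator above is driven once per tuple: the runtime calls evaluate(...) with a frame tuple reference, the wrapped Hive ExprNodeEvaluator is initialized lazily on first use, the tuple is bound to the cached lazy row object, and the result is serialized to the shared output. A minimal driving sketch, where columnDesc, rowInspector, provider, and tuples are hypothetical stand-ins for objects the surrounding Hyracks operator would supply:

    // Sketch only: per-tuple evaluation against one concrete subclass.
    ICopyEvaluator eval = new ColumnExpressionEvaluator(columnDesc, rowInspector, provider);
    for (IFrameTupleReference tuple : tuples) {
        // binds the lazy row, runs the Hive evaluator, and appends the
        // serialized result to provider.getDataOutput()
        eval.evaluate(tuple);
    }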
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregationFunctionEvaluator.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregationFunctionEvaluator.java
new file mode 100644
index 0000000..e500376
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregationFunctionEvaluator.java
@@ -0,0 +1,224 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCount;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.BytesWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyObject;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyAggregateFunction;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AggregationFunctionEvaluator implements ICopyAggregateFunction {
+
+    /**
+     * the mode of aggregation function
+     */
+    private GenericUDAFEvaluator.Mode mode;
+
+    /**
+     * an array of evaluators
+     */
+    private ExprNodeEvaluator[] evaluators;
+
+    /**
+     * udaf evaluator partial
+     */
+    private GenericUDAFEvaluator udafPartial;
+
+    /**
+     * udaf evaluator complete
+     */
+    private GenericUDAFEvaluator udafComplete;
+
+    /**
+     * cached parameter objects
+     */
+    private Object[] cachedParameters;
+
+    /**
+     * cached row objects
+     */
+    private LazyObject<? extends ObjectInspector> cachedRowObject;
+
+    /**
+     * the output channel
+     */
+    private DataOutput out;
+
+    /**
+     * aggregation buffer
+     */
+    private AggregationBuffer aggBuffer;
+
+    /**
+     * we only use lazy serde to do serialization
+     */
+    private SerDe lazySer;
+
+    /**
+     * the output object inspector for this aggregation function
+     */
+    private ObjectInspector outputInspector;
+
+    /**
+     * the partial (intermediate) output object inspector for this aggregation function
+     */
+    private ObjectInspector outputInspectorPartial;
+
+    /**
+     * parameter inspectors
+     */
+    private ObjectInspector[] parameterInspectors;
+
+    /**
+     * output; make sure the aggregation function creates as few objects as possible
+     *
+     * @param desc
+     * @param oi
+     * @param output
+     */
+    public AggregationFunctionEvaluator(List<ExprNodeDesc> inputs, List<TypeInfo> inputTypes, String genericUDAFName,
+            GenericUDAFEvaluator.Mode aggMode, boolean distinct, ObjectInspector oi, DataOutput output,
+            ExprNodeEvaluator[] evals, ObjectInspector[] pInspectors, Object[] parameterCache, SerDe serde,
+            LazyObject<? extends ObjectInspector> row, GenericUDAFEvaluator udafunctionPartial,
+            GenericUDAFEvaluator udafunctionComplete, ObjectInspector outputOi, ObjectInspector outputOiPartial) {
+        // shared object across threads
+        this.out = output;
+        this.mode = aggMode;
+        this.parameterInspectors = pInspectors;
+
+        // thread local objects
+        this.evaluators = evals;
+        this.cachedParameters = parameterCache;
+        this.cachedRowObject = row;
+        this.lazySer = serde;
+        this.udafPartial = udafunctionPartial;
+        this.udafComplete = udafunctionComplete;
+        this.outputInspector = outputOi;
+        this.outputInspectorPartial = outputOiPartial;
+    }
+
+    @Override
+    public void init() throws AlgebricksException {
+        try {
+            aggBuffer = udafPartial.getNewAggregationBuffer();
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+
+    @Override
+    public void step(IFrameTupleReference tuple) throws AlgebricksException {
+        readIntoCache(tuple);
+        processRow();
+    }
+
+    private void processRow() throws AlgebricksException {
+        try {
+            // get values by evaluating them
+            for (int i = 0; i < cachedParameters.length; i++) {
+                cachedParameters[i] = evaluators[i].evaluate(cachedRowObject);
+            }
+            processAggregate();
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+
+    private void processAggregate() throws HiveException {
+        /**
+         * accumulate the aggregation function
+         */
+        switch (mode) {
+            case PARTIAL1:
+            case COMPLETE:
+                udafPartial.iterate(aggBuffer, cachedParameters);
+                break;
+            case PARTIAL2:
+            case FINAL:
+                if (udafPartial instanceof GenericUDAFCount.GenericUDAFCountEvaluator) {
+                    Object parameter = ((PrimitiveObjectInspector) parameterInspectors[0])
+                            .getPrimitiveWritableObject(cachedParameters[0]);
+                    udafPartial.merge(aggBuffer, parameter);
+                } else
+                    udafPartial.merge(aggBuffer, cachedParameters[0]);
+                break;
+            default:
+                break;
+        }
+    }
+
+    /**
+     * serialize the result
+     *
+     * @param result
+     *            the evaluation result
+     * @throws IOException
+     * @throws AlgebricksException
+     */
+    private void serializeResult(Object result, ObjectInspector oi) throws IOException, AlgebricksException {
+        try {
+            BytesWritable outputWritable = (BytesWritable) lazySer.serialize(result, oi);
+            out.write(outputWritable.getBytes(), 0, outputWritable.getLength());
+        } catch (SerDeException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+
+    /**
+     * bind the tuple reference to the cached row object
+     *
+     * @param r
+     */
+    private void readIntoCache(IFrameTupleReference r) {
+        cachedRowObject.init(r);
+    }
+
+    @Override
+    public void finish() throws AlgebricksException {
+        // aggregator
+        try {
+            Object result = null;
+            result = udafPartial.terminatePartial(aggBuffer);
+            if (mode == GenericUDAFEvaluator.Mode.COMPLETE || mode == GenericUDAFEvaluator.Mode.FINAL) {
+                result = udafComplete.terminate(aggBuffer);
+                serializeResult(result, outputInspector);
+            } else {
+                serializeResult(result, outputInspectorPartial);
+            }
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+
+    @Override
+    public void finishPartial() throws AlgebricksException {
+        // aggregator.
+        try {
+            Object result = null;
+            // get aggregations
+            result = udafPartial.terminatePartial(aggBuffer);
+            serializeResult(result, outputInspectorPartial);
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+}
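The switch in processAggregate() mirrors Hive's four-phase UDAF protocol: PARTIAL1 and COMPLETE consume raw rows through iterate(), PARTIAL2 and FINAL consume intermediate aggregates through merge(), and finish() emits either terminate() or terminatePartial() output accordingly. A condensed sketch of the same protocol driven directly against a Hive evaluator (COMPLETE mode, COUNT); the parameter inspector is illustrative and rows is an assumed Iterable<Object[]> of already-evaluated parameters:

    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCount;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    GenericUDAFEvaluator eval = new GenericUDAFCount.GenericUDAFCountEvaluator();
    ObjectInspector[] paramOIs = { PrimitiveObjectInspectorFactory.javaStringObjectInspector };
    eval.init(GenericUDAFEvaluator.Mode.COMPLETE, paramOIs);
    AggregationBuffer buf = eval.getNewAggregationBuffer();
    for (Object[] row : rows) {
        eval.iterate(buf, row);             // the PARTIAL1/COMPLETE arm of processAggregate()
    }
    Object count = eval.terminate(buf);     // the COMPLETE/FINAL arm of finish()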
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregatuibFunctionSerializableEvaluator.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregatuibFunctionSerializableEvaluator.java
new file mode 100644
index 0000000..1933253
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/AggregatuibFunctionSerializableEvaluator.java
@@ -0,0 +1,248 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCount;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.BytesWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyObject;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopySerializableAggregateFunction;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AggregatuibFunctionSerializableEvaluator implements ICopySerializableAggregateFunction {
+
+    /**
+     * the mode of aggregation function
+     */
+    private GenericUDAFEvaluator.Mode mode;
+
+    /**
+     * an array of evaluators
+     */
+    private ExprNodeEvaluator[] evaluators;
+
+    /**
+     * udaf evaluator partial
+     */
+    private GenericUDAFEvaluator udafPartial;
+
+    /**
+     * udaf evaluator complete
+     */
+    private GenericUDAFEvaluator udafComplete;
+
+    /**
+     * cached parameter objects
+     */
+    private Object[] cachedParameters;
+
+    /**
+     * cached row objects
+     */
+    private LazyObject<? extends ObjectInspector> cachedRowObject;
+
+    /**
+     * aggregation buffer
+     */
+    private SerializableBuffer aggBuffer;
+
+    /**
+     * we only use lazy serde to do serialization
+     */
+    private SerDe lazySer;
+
+    /**
+     * the output object inspector for this aggregation function
+     */
+    private ObjectInspector outputInspector;
+
+    /**
+     * the partial (intermediate) output object inspector for this aggregation function
+     */
+    private ObjectInspector outputInspectorPartial;
+
+    /**
+     * parameter inspectors
+     */
+    private ObjectInspector[] parameterInspectors;
+
+    /**
+     * output; make sure the aggregation function creates as few objects as possible
+     * 
+     * @param desc
+     * @param oi
+     * @param output
+     */
+    public AggregatuibFunctionSerializableEvaluator(List<ExprNodeDesc> inputs, List<TypeInfo> inputTypes,
+            String genericUDAFName, GenericUDAFEvaluator.Mode aggMode, boolean distinct, ObjectInspector oi,
+            ExprNodeEvaluator[] evals, ObjectInspector[] pInspectors, Object[] parameterCache, SerDe serde,
+            LazyObject<? extends ObjectInspector> row, GenericUDAFEvaluator udafunctionPartial,
+            GenericUDAFEvaluator udafunctionComplete, ObjectInspector outputOi, ObjectInspector outputOiPartial)
+            throws AlgebricksException {
+        // shared object across threads
+        this.mode = aggMode;
+        this.parameterInspectors = pInspectors;
+
+        // thread local objects
+        this.evaluators = evals;
+        this.cachedParameters = parameterCache;
+        this.cachedRowObject = row;
+        this.lazySer = serde;
+        this.udafPartial = udafunctionPartial;
+        this.udafComplete = udafunctionComplete;
+        this.outputInspector = outputOi;
+        this.outputInspectorPartial = outputOiPartial;
+
+        try {
+            aggBuffer = (SerializableBuffer) udafPartial.getNewAggregationBuffer();
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+
+    @Override
+    public void init(DataOutput output) throws AlgebricksException {
+        try {
+            udafPartial.reset(aggBuffer);
+            outputAggBuffer(aggBuffer, output);
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+
+    @Override
+    public void step(IFrameTupleReference tuple, byte[] data, int start, int len) throws AlgebricksException {
+        deSerializeAggBuffer(aggBuffer, data, start, len);
+        readIntoCache(tuple);
+        processRow();
+        serializeAggBuffer(aggBuffer, data, start, len);
+    }
+
+    private void processRow() throws AlgebricksException {
+        try {
+            // get values by evaluating them
+            for (int i = 0; i < cachedParameters.length; i++) {
+                cachedParameters[i] = evaluators[i].evaluate(cachedRowObject);
+            }
+            processAggregate();
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+
+    private void processAggregate() throws HiveException {
+        /**
+         * accumulate the aggregation function
+         */
+        switch (mode) {
+            case PARTIAL1:
+            case COMPLETE:
+                udafPartial.iterate(aggBuffer, cachedParameters);
+                break;
+            case PARTIAL2:
+            case FINAL:
+                if (udafPartial instanceof GenericUDAFCount.GenericUDAFCountEvaluator) {
+                    Object parameter = ((PrimitiveObjectInspector) parameterInspectors[0])
+                            .getPrimitiveWritableObject(cachedParameters[0]);
+                    udafPartial.merge(aggBuffer, parameter);
+                } else
+                    udafPartial.merge(aggBuffer, cachedParameters[0]);
+                break;
+            default:
+                break;
+        }
+    }
+
+    /**
+     * serialize the result
+     * 
+     * @param result
+     *            the evaluation result
+     * @throws IOException
+     * @throws AlgebricksException
+     */
+    private void serializeResult(Object result, ObjectInspector oi, DataOutput out) throws IOException,
+            AlgebricksException {
+        try {
+            BytesWritable outputWritable = (BytesWritable) lazySer.serialize(result, oi);
+            out.write(outputWritable.getBytes(), 0, outputWritable.getLength());
+        } catch (SerDeException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+
+    /**
+     * bind the tuple reference to the cached row object
+     * 
+     * @param r
+     */
+    private void readIntoCache(IFrameTupleReference r) {
+        cachedRowObject.init(r);
+    }
+
+    @Override
+    public void finish(byte[] data, int start, int len, DataOutput output) throws AlgebricksException {
+        deSerializeAggBuffer(aggBuffer, data, start, len);
+        // aggregator
+        try {
+            Object result = null;
+            result = udafPartial.terminatePartial(aggBuffer);
+            if (mode == GenericUDAFEvaluator.Mode.COMPLETE || mode == GenericUDAFEvaluator.Mode.FINAL) {
+                result = udafComplete.terminate(aggBuffer);
+                serializeResult(result, outputInspector, output);
+            } else {
+                serializeResult(result, outputInspectorPartial, output);
+            }
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+
+    @Override
+    public void finishPartial(byte[] data, int start, int len, DataOutput output) throws AlgebricksException {
+        deSerializeAggBuffer(aggBuffer, data, start, len);
+        // aggregator.
+        try {
+            Object result = null;
+            // get aggregations
+            result = udafPartial.terminatePartial(aggBuffer);
+            serializeResult(result, outputInspectorPartial, output);
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+
+    private void serializeAggBuffer(SerializableBuffer buffer, byte[] data, int start, int len)
+            throws AlgebricksException {
+        buffer.serializeAggBuffer(data, start, len);
+    }
+
+    private void deSerializeAggBuffer(SerializableBuffer buffer, byte[] data, int start, int len)
+            throws AlgebricksException {
+        buffer.deSerializeAggBuffer(data, start, len);
+    }
+
+    private void outputAggBuffer(SerializableBuffer buffer, DataOutput out) throws AlgebricksException {
+        try {
+            buffer.serializeAggBuffer(out);
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/BufferSerDeUtil.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/BufferSerDeUtil.java
new file mode 100644
index 0000000..96065e5
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/BufferSerDeUtil.java
@@ -0,0 +1,67 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;
+
+public class BufferSerDeUtil {
+
+    public static double getDouble(byte[] bytes, int offset) {
+        return Double.longBitsToDouble(getLong(bytes, offset));
+    }
+
+    public static float getFloat(byte[] bytes, int offset) {
+        return Float.intBitsToFloat(getInt(bytes, offset));
+    }
+
+    public static boolean getBoolean(byte[] bytes, int offset) {
+        if (bytes[offset] == 0)
+            return false;
+        else
+            return true;
+    }
+
+    public static int getInt(byte[] bytes, int offset) {
+        return ((bytes[offset] & 0xff) << 24) + ((bytes[offset + 1] & 0xff) << 16) + ((bytes[offset + 2] & 0xff) << 8)
+                + ((bytes[offset + 3] & 0xff) << 0);
+    }
+
+    public static long getLong(byte[] bytes, int offset) {
+        return (((long) (bytes[offset] & 0xff)) << 56) + (((long) (bytes[offset + 1] & 0xff)) << 48)
+                + (((long) (bytes[offset + 2] & 0xff)) << 40) + (((long) (bytes[offset + 3] & 0xff)) << 32)
+                + (((long) (bytes[offset + 4] & 0xff)) << 24) + (((long) (bytes[offset + 5] & 0xff)) << 16)
+                + (((long) (bytes[offset + 6] & 0xff)) << 8) + (((long) (bytes[offset + 7] & 0xff)) << 0);
+    }
+
+    public static void writeBoolean(boolean value, byte[] bytes, int offset) {
+        if (value)
+            bytes[offset] = (byte) 1;
+        else
+            bytes[offset] = (byte) 0;
+    }
+
+    public static void writeInt(int value, byte[] bytes, int offset) {
+        bytes[offset++] = (byte) (value >> 24);
+        bytes[offset++] = (byte) (value >> 16);
+        bytes[offset++] = (byte) (value >> 8);
+        bytes[offset++] = (byte) (value);
+    }
+
+    public static void writeLong(long value, byte[] bytes, int offset) {
+        bytes[offset++] = (byte) (value >> 56);
+        bytes[offset++] = (byte) (value >> 48);
+        bytes[offset++] = (byte) (value >> 40);
+        bytes[offset++] = (byte) (value >> 32);
+        bytes[offset++] = (byte) (value >> 24);
+        bytes[offset++] = (byte) (value >> 16);
+        bytes[offset++] = (byte) (value >> 8);
+        bytes[offset++] = (byte) (value);
+    }
+
+    public static void writeDouble(double value, byte[] bytes, int offset) {
+        long lValue = Double.doubleToLongBits(value);
+        writeLong(lValue, bytes, offset);
+    }
+
+    public static void writeFloat(float value, byte[] bytes, int offset) {
+        int iValue = Float.floatToIntBits(value);
+        writeInt(iValue, bytes, offset);
+    }
+
+}
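These helpers read and write fixed-width big-endian values at explicit offsets, so state layout is entirely the caller's responsibility. A short round-trip sketch, with the layout chosen purely for illustration:

    // One long count followed by one double sum.
    byte[] state = new byte[16];
    BufferSerDeUtil.writeLong(42L, state, 0);
    BufferSerDeUtil.writeDouble(3.5, state, 8);
    long count = BufferSerDeUtil.getLong(state, 0);    // 42
    double sum = BufferSerDeUtil.getDouble(state, 8);  // 3.5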
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ColumnExpressionEvaluator.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ColumnExpressionEvaluator.java
new file mode 100644
index 0000000..5647f6a
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ColumnExpressionEvaluator.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;
+
+import org.apache.hadoop.hive.ql.exec.ExprNodeColumnEvaluator;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+
+public class ColumnExpressionEvaluator extends AbstractExpressionEvaluator {
+
+    public ColumnExpressionEvaluator(ExprNodeColumnDesc expr, ObjectInspector oi, IDataOutputProvider output)
+            throws AlgebricksException {
+        super(new ExprNodeColumnEvaluator(expr), oi, output);
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ConstantExpressionEvaluator.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ConstantExpressionEvaluator.java
new file mode 100644
index 0000000..d8796ea
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/ConstantExpressionEvaluator.java
@@ -0,0 +1,16 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;
+
+import org.apache.hadoop.hive.ql.exec.ExprNodeConstantEvaluator;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+
+public class ConstantExpressionEvaluator extends AbstractExpressionEvaluator {
+
+    public ConstantExpressionEvaluator(ExprNodeConstantDesc expr, ObjectInspector oi, IDataOutputProvider output)
+            throws AlgebricksException {
+        super(new ExprNodeConstantEvaluator(expr), oi, output);
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FieldExpressionEvaluator.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FieldExpressionEvaluator.java
new file mode 100644
index 0000000..35560b6
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FieldExpressionEvaluator.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;
+
+import org.apache.hadoop.hive.ql.exec.ExprNodeFieldEvaluator;
+import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+
+public class FieldExpressionEvaluator extends AbstractExpressionEvaluator {
+
+    public FieldExpressionEvaluator(ExprNodeFieldDesc expr, ObjectInspector oi, IDataOutputProvider output)
+            throws AlgebricksException {
+        super(new ExprNodeFieldEvaluator(expr), oi, output);
+    }
+
+}
\ No newline at end of file
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FunctionExpressionEvaluator.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FunctionExpressionEvaluator.java
new file mode 100644
index 0000000..7ffec7a
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/FunctionExpressionEvaluator.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;
+
+import org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+
+public class FunctionExpressionEvaluator extends AbstractExpressionEvaluator {
+
+    public FunctionExpressionEvaluator(ExprNodeGenericFuncDesc expr, ObjectInspector oi, IDataOutputProvider output)
+            throws AlgebricksException {
+        super(new ExprNodeGenericFuncEvaluator(expr), oi, output);
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/NullExpressionEvaluator.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/NullExpressionEvaluator.java
new file mode 100644
index 0000000..ca60385
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/NullExpressionEvaluator.java
@@ -0,0 +1,16 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;
+
+import org.apache.hadoop.hive.ql.exec.ExprNodeNullEvaluator;
+import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+
+public class NullExpressionEvaluator extends AbstractExpressionEvaluator {
+
+    public NullExpressionEvaluator(ExprNodeNullDesc expr, ObjectInspector oi, IDataOutputProvider output)
+            throws AlgebricksException {
+        super(new ExprNodeNullEvaluator(expr), oi, output);
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/SerializableBuffer.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/SerializableBuffer.java
new file mode 100644
index 0000000..676989e
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/SerializableBuffer.java
@@ -0,0 +1,16 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
+
+public interface SerializableBuffer extends AggregationBuffer {
+
+    public void deSerializeAggBuffer(byte[] data, int start, int len);
+
+    public void serializeAggBuffer(byte[] data, int start, int len);
+
+    public void serializeAggBuffer(DataOutput output) throws IOException;
+
+}
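This is the contract that lets AggregatuibFunctionSerializableEvaluator.step() deserialize, update, and re-serialize aggregation state in place inside a frame: the buffer must be able to rebuild itself from, and write itself back to, a byte range of known layout. A hypothetical fixed-width implementation (not part of this patch), built on BufferSerDeUtil above:

    import java.io.DataOutput;
    import java.io.IOException;

    // Illustrative only: an 8-byte running sum satisfying SerializableBuffer.
    public class LongSumBuffer implements SerializableBuffer {
        long sum;

        @Override
        public void deSerializeAggBuffer(byte[] data, int start, int len) {
            sum = BufferSerDeUtil.getLong(data, start);
        }

        @Override
        public void serializeAggBuffer(byte[] data, int start, int len) {
            BufferSerDeUtil.writeLong(sum, data, start);
        }

        @Override
        public void serializeAggBuffer(DataOutput output) throws IOException {
            output.writeLong(sum);
        }
    }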
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/UDTFFunctionEvaluator.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/UDTFFunctionEvaluator.java
new file mode 100644
index 0000000..2e78663
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/evaluator/UDTFFunctionEvaluator.java
@@ -0,0 +1,143 @@
+package edu.uci.ics.hivesterix.runtime.evaluator;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.UDTFDesc;
+import org.apache.hadoop.hive.ql.udf.generic.Collector;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.io.BytesWritable;
+
+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;
+import edu.uci.ics.hivesterix.serde.lazy.LazyColumnar;
+import edu.uci.ics.hivesterix.serde.lazy.LazyFactory;
+import edu.uci.ics.hivesterix.serde.lazy.LazyObject;
+import edu.uci.ics.hivesterix.serde.lazy.LazySerDe;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyUnnestingFunction;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class UDTFFunctionEvaluator implements ICopyUnnestingFunction, Collector {
+
+    /**
+     * udtf function
+     */
+    private UDTFDesc func;
+
+    /**
+     * input object inspector
+     */
+    private ObjectInspector inputInspector;
+
+    /**
+     * output object inspector
+     */
+    private ObjectInspector outputInspector;
+
+    /**
+     * object inspector for udtf
+     */
+    private ObjectInspector[] udtfInputOIs;
+
+    /**
+     * generic udtf
+     */
+    private GenericUDTF udtf;
+
+    /**
+     * data output
+     */
+    private DataOutput out;
+
+    /**
+     * the input row object
+     */
+    private LazyColumnar cachedRowObject;
+
+    /**
+     * cached row object (input)
+     */
+    private Object[] cachedInputObjects;
+
+    /**
+     * serialization/deserialization
+     */
+    private SerDe lazySerDe;
+
+    /**
+     * columns fed into the UDTF
+     */
+    private int[] columns;
+
+    public UDTFFunctionEvaluator(UDTFDesc desc, Schema schema, int[] cols, DataOutput output) {
+        this.func = desc;
+        this.inputInspector = schema.toObjectInspector();
+        udtf = func.getGenericUDTF();
+        out = output;
+        columns = cols;
+    }
+
+    @Override
+    public void init(IFrameTupleReference tuple) throws AlgebricksException {
+        cachedInputObjects = new LazyObject[columns.length];
+        try {
+            cachedRowObject = (LazyColumnar) LazyFactory.createLazyObject(inputInspector);
+            outputInspector = udtf.initialize(udtfInputOIs);
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        }
+        udtf.setCollector(this);
+        lazySerDe = new LazySerDe();
+        readIntoCache(tuple);
+    }
+
+    @Override
+    public boolean step() throws AlgebricksException {
+        try {
+            udtf.process(cachedInputObjects);
+            return true;
+        } catch (HiveException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+
+    /**
+     * bind the tuple reference to the cached row object
+     *
+     * @param r
+     */
+    private void readIntoCache(IFrameTupleReference r) {
+        cachedRowObject.init(r);
+        for (int i = 0; i < cachedInputObjects.length; i++) {
+            cachedInputObjects[i] = cachedRowObject.getField(columns[i]);
+        }
+    }
+
+    /**
+     * serialize the result
+     *
+     * @param result
+     *            the evaluation result
+     * @throws IOException
+     * @throws SerDeException
+     */
+    private void serializeResult(Object result) throws SerDeException, IOException {
+        BytesWritable outputWritable = (BytesWritable) lazySerDe.serialize(result, outputInspector);
+        out.write(outputWritable.getBytes(), 0, outputWritable.getLength());
+    }
+
+    @Override
+    public void collect(Object input) throws HiveException {
+        try {
+            serializeResult(input);
+        } catch (IOException e) {
+            throw new HiveException(e);
+        } catch (SerDeException e) {
+            throw new HiveException(e);
+        }
+    }
+}
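Control flows both ways in this evaluator: the runtime calls init(tuple) and then step(), step() pushes the cached field values through GenericUDTF.process(...), and the UDTF pushes each output row back through collect(), which serializes it straight to the DataOutput. A sketch of the driving side, with udtfDesc, schema, cols, output, and tuple as hypothetical runtime-supplied stand-ins:

    ICopyUnnestingFunction unnest = new UDTFFunctionEvaluator(udtfDesc, schema, cols, output);
    unnest.init(tuple);               // bind the row, initialize the UDTF, register the collector
    boolean emitted = unnest.step();  // one process(...) call; rows reach `output` via collect(...)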
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryAscComparatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryAscComparatorFactory.java
new file mode 100644
index 0000000..f3b76e4
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryAscComparatorFactory.java
@@ -0,0 +1,34 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class HiveByteBinaryAscComparatorFactory implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static HiveByteBinaryAscComparatorFactory INSTANCE = new HiveByteBinaryAscComparatorFactory();
+
+    private HiveByteBinaryAscComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+            private byte left;
+            private byte right;
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                left = b1[s1];
+                right = b2[s2];
+                if (left > right)
+                    return 1;
+                else if (left == right)
+                    return 0;
+                else
+                    return -1;
+            }
+        };
+    }
+
+}
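All of the comparator factories that follow share this shape: a serializable singleton factory whose createBinaryComparator() returns a fresh, stateful comparator, so each operator thread can hold its own instance. Usage is uniform across types; a small sketch against the byte variant above:

    IBinaryComparator cmp = HiveByteBinaryAscComparatorFactory.INSTANCE.createBinaryComparator();
    byte[] a = { 3 };
    byte[] b = { 7 };
    int c = cmp.compare(a, 0, 1, b, 0, 1);   // negative: 3 sorts before 7 in ascending order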
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryDescComparatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryDescComparatorFactory.java
new file mode 100644
index 0000000..8d452dc
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveByteBinaryDescComparatorFactory.java
@@ -0,0 +1,33 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class HiveByteBinaryDescComparatorFactory implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static HiveByteBinaryDescComparatorFactory INSTANCE = new HiveByteBinaryDescComparatorFactory();
+
+    private HiveByteBinaryDescComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+            private byte left;
+            private byte right;
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                left = b1[s1];
+                right = b2[s2];
+                if (left > right)
+                    return -1;
+                else if (left == right)
+                    return 0;
+                else
+                    return 1;
+            }
+        };
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryAscComparatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryAscComparatorFactory.java
new file mode 100644
index 0000000..0b5350a
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryAscComparatorFactory.java
@@ -0,0 +1,35 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class HiveDoubleBinaryAscComparatorFactory implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static HiveDoubleBinaryAscComparatorFactory INSTANCE = new HiveDoubleBinaryAscComparatorFactory();
+
+    private HiveDoubleBinaryAscComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+            private double left;
+            private double right;
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                left = Double.longBitsToDouble(LazyUtils.byteArrayToLong(b1, s1));
+                right = Double.longBitsToDouble(LazyUtils.byteArrayToLong(b2, s2));
+                if (left > right)
+                    return 1;
+                else if (left == right)
+                    return 0;
+                else
+                    return -1;
+            }
+        };
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryDescComparatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryDescComparatorFactory.java
new file mode 100644
index 0000000..2405956
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveDoubleBinaryDescComparatorFactory.java
@@ -0,0 +1,35 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class HiveDoubleBinaryDescComparatorFactory implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static HiveDoubleBinaryDescComparatorFactory INSTANCE = new HiveDoubleBinaryDescComparatorFactory();
+
+    private HiveDoubleBinaryDescComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+            private double left;
+            private double right;
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                left = Double.longBitsToDouble(LazyUtils.byteArrayToLong(b1, s1));
+                right = Double.longBitsToDouble(LazyUtils.byteArrayToLong(b2, s2));
+                if (left > right)
+                    return -1;
+                else if (left == right)
+                    return 0;
+                else
+                    return 1;
+            }
+        };
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryAscComparatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryAscComparatorFactory.java
new file mode 100644
index 0000000..05a43e6
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryAscComparatorFactory.java
@@ -0,0 +1,35 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class HiveFloatBinaryAscComparatorFactory implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static HiveFloatBinaryAscComparatorFactory INSTANCE = new HiveFloatBinaryAscComparatorFactory();
+
+    private HiveFloatBinaryAscComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+            private float left;
+            private float right;
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                left = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b1, s1));
+                right = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b2, s2));
+                if (left > right)
+                    return 1;
+                else if (left == right)
+                    return 0;
+                else
+                    return -1;
+            }
+        };
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryDescComparatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryDescComparatorFactory.java
new file mode 100644
index 0000000..2c44f97
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveFloatBinaryDescComparatorFactory.java
@@ -0,0 +1,35 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class HiveFloatBinaryDescComparatorFactory implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static HiveFloatBinaryDescComparatorFactory INSTANCE = new HiveFloatBinaryDescComparatorFactory();
+
+    private HiveFloatBinaryDescComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+            private float left;
+            private float right;
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                left = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b1, s1));
+                right = Float.intBitsToFloat(LazyUtils.byteArrayToInt(b2, s2));
+                if (left > right)
+                    return -1;
+                else if (left == right)
+                    return 0;
+                else
+                    return 1;
+            }
+        };
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryAscComparatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryAscComparatorFactory.java
new file mode 100644
index 0000000..0127791
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryAscComparatorFactory.java
@@ -0,0 +1,40 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class HiveIntegerBinaryAscComparatorFactory implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static final HiveIntegerBinaryAscComparatorFactory INSTANCE = new HiveIntegerBinaryAscComparatorFactory();
+
+    private HiveIntegerBinaryAscComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+            private VInt left = new VInt();
+            private VInt right = new VInt();
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                LazyUtils.readVInt(b1, s1, left);
+                LazyUtils.readVInt(b2, s2, right);
+
+                if (left.length != l1 || right.length != l2)
+                    throw new IllegalArgumentException("length mismatch in int comparator function actual: "
+                            + left.length + "," + right.length + " expected " + l1 + "," + l2);
+
+                if (left.value > right.value)
+                    return 1;
+                else if (left.value == right.value)
+                    return 0;
+                else
+                    return -1;
+            }
+        };
+    }
+}
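Unlike the fixed-width float/double/short comparators, the int and long variants first decode variable-length integers, and the length check guards against a mis-sliced field. Assuming hivesterix's LazyUtils.readVInt follows the standard Hadoop vint encoding (one byte for small magnitudes, up to five bytes for a full int), the format can be illustrated with the public WritableUtils API:

    import java.io.*;
    import org.apache.hadoop.io.WritableUtils;

    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    WritableUtils.writeVInt(new DataOutputStream(bos), 300);
    byte[] encoded = bos.toByteArray();   // length varies with magnitude
    int decoded = WritableUtils.readVInt(
            new DataInputStream(new ByteArrayInputStream(encoded)));   // 300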
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryDescComparatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryDescComparatorFactory.java
new file mode 100644
index 0000000..5116337
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveIntegerBinaryDescComparatorFactory.java
@@ -0,0 +1,38 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class HiveIntegerBinaryDescComparatorFactory implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static final HiveIntegerBinaryDescComparatorFactory INSTANCE = new HiveIntegerBinaryDescComparatorFactory();
+
+    private HiveIntegerBinaryDescComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+            private VInt left = new VInt();
+            private VInt right = new VInt();
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                LazyUtils.readVInt(b1, s1, left);
+                LazyUtils.readVInt(b2, s2, right);
+                if (left.length != l1 || right.length != l2)
+                    throw new IllegalArgumentException("length mismatch in int comparator function actual: "
+                            + left.length + "," + right.length + " expected " + l1 + "," + l2);
+                if (left.value > right.value)
+                    return -1;
+                else if (left.value == right.value)
+                    return 0;
+                else
+                    return 1;
+            }
+        };
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryAscComparatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryAscComparatorFactory.java
new file mode 100644
index 0000000..fa416a9
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryAscComparatorFactory.java
@@ -0,0 +1,38 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VLong;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class HiveLongBinaryAscComparatorFactory implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static final HiveLongBinaryAscComparatorFactory INSTANCE = new HiveLongBinaryAscComparatorFactory();
+
+    private HiveLongBinaryAscComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+            private VLong left = new VLong();
+            private VLong right = new VLong();
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                LazyUtils.readVLong(b1, s1, left);
+                LazyUtils.readVLong(b2, s2, right);
+                if (left.length != l1 || right.length != l2)
+                    throw new IllegalArgumentException("length mismatch in long comparator function actual: "
+                            + left.length + " expected " + l1);
+                if (left.value > right.value)
+                    return 1;
+                else if (left.value == right.value)
+                    return 0;
+                else
+                    return -1;
+            }
+        };
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryDescComparatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryDescComparatorFactory.java
new file mode 100644
index 0000000..e72dc62
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveLongBinaryDescComparatorFactory.java
@@ -0,0 +1,38 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VLong;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class HiveLongBinaryDescComparatorFactory implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static final HiveLongBinaryDescComparatorFactory INSTANCE = new HiveLongBinaryDescComparatorFactory();
+
+    private HiveLongBinaryDescComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+            private VLong left = new VLong();
+            private VLong right = new VLong();
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                LazyUtils.readVLong(b1, s1, left);
+                LazyUtils.readVLong(b2, s2, right);
+                if (left.length != l1 || right.length != l2)
+                    throw new IllegalArgumentException("length mismatch in long comparator function actual: "
+                            + left.length + " expected " + l1);
+                if (left.value > right.value)
+                    return -1;
+                else if (left.value == right.value)
+                    return 0;
+                else
+                    return 1;
+            }
+        };
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryAscComparatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryAscComparatorFactory.java
new file mode 100644
index 0000000..a3745fa
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryAscComparatorFactory.java
@@ -0,0 +1,35 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class HiveShortBinaryAscComparatorFactory implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static HiveShortBinaryAscComparatorFactory INSTANCE = new HiveShortBinaryAscComparatorFactory();
+
+    private HiveShortBinaryAscComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+            private short left;
+            private short right;
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                left = LazyUtils.byteArrayToShort(b1, s1);
+                right = LazyUtils.byteArrayToShort(b2, s2);
+                if (left > right)
+                    return 1;
+                else if (left == right)
+                    return 0;
+                else
+                    return -1;
+            }
+        };
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryDescComparatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryDescComparatorFactory.java
new file mode 100644
index 0000000..44d3f43
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveShortBinaryDescComparatorFactory.java
@@ -0,0 +1,35 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class HiveShortBinaryDescComparatorFactory implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static HiveShortBinaryDescComparatorFactory INSTANCE = new HiveShortBinaryDescComparatorFactory();
+
+    private HiveShortBinaryDescComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+            private short left;
+            private short right;
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                left = LazyUtils.byteArrayToShort(b1, s1);
+                right = LazyUtils.byteArrayToShort(b2, s2);
+                if (left > right)
+                    return -1;
+                else if (left == right)
+                    return 0;
+                else
+                    return 1;
+            }
+        };
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryAscComparatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryAscComparatorFactory.java
new file mode 100644
index 0000000..6da9716
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryAscComparatorFactory.java
@@ -0,0 +1,40 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;
+
+import org.apache.hadoop.io.Text;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class HiveStringBinaryAscComparatorFactory implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static HiveStringBinaryAscComparatorFactory INSTANCE = new HiveStringBinaryAscComparatorFactory();
+
+    private HiveStringBinaryAscComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+            private VInt leftLen = new VInt();
+            private VInt rightLen = new VInt();
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                LazyUtils.readVInt(b1, s1, leftLen);
+                LazyUtils.readVInt(b2, s2, rightLen);
+
+                if (leftLen.value + leftLen.length != l1 || rightLen.value + rightLen.length != l2)
+                    throw new IllegalStateException("parse string: length mismatch, expected "
+                            + (leftLen.value + leftLen.length) + ", " + (rightLen.value + rightLen.length)
+                            + " but got " + l1 + ", " + l2);
+
+                return Text.Comparator.compareBytes(b1, s1 + leftLen.length, l1 - leftLen.length, b2, s2
+                        + rightLen.length, l2 - rightLen.length);
+            }
+        };
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryDescComparatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryDescComparatorFactory.java
new file mode 100644
index 0000000..c579711
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/comparator/HiveStringBinaryDescComparatorFactory.java
@@ -0,0 +1,39 @@
+package edu.uci.ics.hivesterix.runtime.factory.comparator;
+
+import org.apache.hadoop.io.WritableComparator;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class HiveStringBinaryDescComparatorFactory implements IBinaryComparatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static HiveStringBinaryDescComparatorFactory INSTANCE = new HiveStringBinaryDescComparatorFactory();
+
+    private HiveStringBinaryDescComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+            private VInt leftLen = new VInt();
+            private VInt rightLen = new VInt();
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                LazyUtils.readVInt(b1, s1, leftLen);
+                LazyUtils.readVInt(b2, s2, rightLen);
+
+                if (leftLen.value + leftLen.length != l1 || rightLen.value + rightLen.length != l2)
+                    throw new IllegalStateException("parse string: length mismatch, expected "
+                            + (leftLen.value + leftLen.length) + ", " + (rightLen.value + rightLen.length)
+                            + " but got " + l1 + ", " + l2);
+
+                return -WritableComparator.compareBytes(b1, s1 + leftLen.length, l1 - leftLen.length, b2, s2
+                        + rightLen.length, l2 - rightLen.length);
+            }
+        };
+    }
+}
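
Both string comparators expect a field laid out as a vint length header followed by the string bytes, and check that header length plus payload length equals the field length before comparing payloads. Assuming LazyUtils.readVInt follows the standard Hadoop vint encoding, the same prefix can be decoded with stock Hadoop utilities:

    // Sketch: decode the length prefix of a vint-prefixed string field.
    // WritableComparator.readVInt throws IOException; handling elided.
    int payloadLen = WritableComparator.readVInt(bytes, offset);  // ~ vint.value
    int headerLen = WritableUtils.decodeVIntSize(bytes[offset]);  // ~ vint.length
    // The payload to compare is bytes[offset + headerLen .. offset + headerLen + payloadLen).
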
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionFactory.java
new file mode 100644
index 0000000..d664341
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionFactory.java
@@ -0,0 +1,367 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+import edu.uci.ics.hivesterix.logical.expression.ExpressionTranslator;
+import edu.uci.ics.hivesterix.runtime.evaluator.AggregationFunctionEvaluator;
+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;
+import edu.uci.ics.hivesterix.serde.lazy.LazyFactory;
+import edu.uci.ics.hivesterix.serde.lazy.LazyObject;
+import edu.uci.ics.hivesterix.serde.lazy.LazySerDe;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyAggregateFunction;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyAggregateFunctionFactory;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+
+public class AggregationFunctionFactory implements ICopyAggregateFunctionFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * list of parameters' serialization
+     */
+    private List<String> parametersSerialization = new ArrayList<String>();
+
+    /**
+     * the name of the udf
+     */
+    private String genericUDAFName;
+
+    /**
+     * aggregation mode
+     */
+    private GenericUDAFEvaluator.Mode mode;
+
+    /**
+     * list of type info
+     */
+    private List<TypeInfo> types = new ArrayList<TypeInfo>();
+
+    /**
+     * distinct or not
+     */
+    private boolean distinct;
+
+    /**
+     * the schema of incoming rows
+     */
+    private Schema rowSchema;
+
+    /**
+     * list of parameters
+     */
+    private transient List<ExprNodeDesc> parametersOrigin;
+
+    /**
+     * row inspector
+     */
+    private transient ObjectInspector rowInspector = null;
+
+    /**
+     * output object inspector
+     */
+    private transient ObjectInspector outputInspector = null;
+
+    /**
+     * output object inspector for the partial aggregation stage
+     */
+    private transient ObjectInspector outputInspectorPartial = null;
+
+    /**
+     * parameter inspectors
+     */
+    private transient ObjectInspector[] parameterInspectors = null;
+
+    /**
+     * expression desc
+     */
+    private transient HashMap<Long, List<ExprNodeDesc>> parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();
+
+    /**
+     * evaluators
+     */
+    private transient HashMap<Long, ExprNodeEvaluator[]> evaluators = new HashMap<Long, ExprNodeEvaluator[]>();
+
+    /**
+     * cached parameter objects
+     */
+    private transient HashMap<Long, Object[]> cachedParameters = new HashMap<Long, Object[]>();
+
+    /**
+     * cached row object: one per thread
+     */
+    private transient HashMap<Long, LazyObject<? extends ObjectInspector>> cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();
+
+    /**
+     * we only use lazy serde to do serialization
+     */
+    private transient HashMap<Long, SerDe> serDe = new HashMap<Long, SerDe>();
+
+    /**
+     * udaf evaluators
+     */
+    private transient HashMap<Long, GenericUDAFEvaluator> udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();
+
+    /**
+     * udaf evaluators
+     */
+    private transient HashMap<Long, GenericUDAFEvaluator> udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();
+
+    /**
+     * aggregation function desc
+     */
+    private transient AggregationDesc aggregator;
+
+    /**
+     * @param expression
+     *            Algebricks function call expression
+     * @param oi
+     *            schema
+     */
+    public AggregationFunctionFactory(AggregateFunctionCallExpression expression, Schema oi,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+
+        try {
+            aggregator = (AggregationDesc) ExpressionTranslator.getHiveExpression(expression, env);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new AlgebricksException(e.getMessage());
+        }
+        init(aggregator.getParameters(), aggregator.getGenericUDAFName(), aggregator.getMode(),
+                aggregator.getDistinct(), oi);
+    }
+
+    /**
+     * initializes the aggregation function factory
+     *
+     * @param inputs
+     * @param name
+     * @param udafMode
+     * @param distinct
+     * @param oi
+     */
+    private void init(List<ExprNodeDesc> inputs, String name, GenericUDAFEvaluator.Mode udafMode, boolean distinct,
+            Schema oi) {
+        parametersOrigin = inputs;
+        genericUDAFName = name;
+        mode = udafMode;
+        this.distinct = distinct;
+        rowSchema = oi;
+
+        for (ExprNodeDesc input : inputs) {
+            TypeInfo type = input.getTypeInfo();
+            if (type instanceof StructTypeInfo) {
+                types.add(TypeInfoFactory.doubleTypeInfo);
+            } else
+                types.add(type);
+
+            String s = Utilities.serializeExpression(input);
+            parametersSerialization.add(s);
+        }
+    }
+
+    @Override
+    public synchronized ICopyAggregateFunction createAggregateFunction(IDataOutputProvider provider)
+            throws AlgebricksException {
+        if (parametersOrigin == null) {
+            Configuration config = new Configuration();
+            config.setClassLoader(this.getClass().getClassLoader());
+            /**
+             * guard against Class.forName(...) calls inside Hive code
+             */
+            Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
+
+            parametersOrigin = new ArrayList<ExprNodeDesc>();
+            for (String serialization : parametersSerialization) {
+                parametersOrigin.add(Utilities.deserializeExpression(serialization, config));
+            }
+        }
+
+        /**
+         * exprs
+         */
+        if (parameterExprs == null)
+            parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();
+
+        /**
+         * evaluators
+         */
+        if (evaluators == null)
+            evaluators = new HashMap<Long, ExprNodeEvaluator[]>();
+
+        /**
+         * cached parameter objects
+         */
+        if (cachedParameters == null)
+            cachedParameters = new HashMap<Long, Object[]>();
+
+        /**
+         * cached row object: one per thread
+         */
+        if (cachedRowObjects == null)
+            cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();
+
+        /**
+         * we only use lazy serde to do serialization
+         */
+        if (serDe == null)
+            serDe = new HashMap<Long, SerDe>();
+
+        /**
+         * UDAF functions
+         */
+        if (udafsComplete == null)
+            udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();
+
+        /**
+         * UDAF functions
+         */
+        if (udafsPartial == null)
+            udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();
+
+        if (parameterInspectors == null)
+            parameterInspectors = new ObjectInspector[parametersOrigin.size()];
+
+        if (rowInspector == null)
+            rowInspector = rowSchema.toObjectInspector();
+
+        // get current thread id
+        long threadId = Thread.currentThread().getId();
+
+        /**
+         * expressions, expressions are thread local
+         */
+        List<ExprNodeDesc> parameters = parameterExprs.get(threadId);
+        if (parameters == null) {
+            parameters = new ArrayList<ExprNodeDesc>();
+            for (ExprNodeDesc parameter : parametersOrigin)
+                parameters.add(parameter.clone());
+            parameterExprs.put(threadId, parameters);
+        }
+
+        /**
+         * cached parameter objects
+         */
+        Object[] cachedParas = cachedParameters.get(threadId);
+        if (cachedParas == null) {
+            cachedParas = new Object[parameters.size()];
+            cachedParameters.put(threadId, cachedParas);
+        }
+
+        /**
+         * cached row object: one per thread
+         */
+        LazyObject<? extends ObjectInspector> cachedRowObject = cachedRowObjects.get(threadId);
+        if (cachedRowObject == null) {
+            cachedRowObject = LazyFactory.createLazyObject(rowInspector);
+            cachedRowObjects.put(threadId, cachedRowObject);
+        }
+
+        /**
+         * we only use lazy serde to do serialization
+         */
+        SerDe lazySer = serDe.get(threadId);
+        if (lazySer == null) {
+            lazySer = new LazySerDe();
+            serDe.put(threadId, lazySer);
+        }
+
+        /**
+         * evaluators
+         */
+        ExprNodeEvaluator[] evals = evaluators.get(threadId);
+        if (evals == null) {
+            evals = new ExprNodeEvaluator[parameters.size()];
+            evaluators.put(threadId, evals);
+        }
+
+        GenericUDAFEvaluator udafPartial;
+        GenericUDAFEvaluator udafComplete;
+
+        // initialize object inspectors
+        try {
+            /**
+             * evaluators, UDFs, and object inspectors are shared within one thread
+             */
+            for (int i = 0; i < evals.length; i++) {
+                if (evals[i] == null) {
+                    evals[i] = ExprNodeEvaluatorFactory.get(parameters.get(i));
+                    if (parameterInspectors[i] == null) {
+                        parameterInspectors[i] = evals[i].initialize(rowInspector);
+                    } else {
+                        evals[i].initialize(rowInspector);
+                    }
+                }
+            }
+
+            udafComplete = udafsComplete.get(threadId);
+            if (udafComplete == null) {
+                try {
+                    udafComplete = FunctionRegistry.getGenericUDAFEvaluator(genericUDAFName, types, distinct, false);
+                } catch (HiveException e) {
+                    throw new AlgebricksException(e);
+                }
+                udafsComplete.put(threadId, udafComplete);
+                udafComplete.init(mode, parameterInspectors);
+            }
+
+            // multiple stage group by, determined by the mode parameter
+            if (outputInspector == null)
+                outputInspector = udafComplete.init(mode, parameterInspectors);
+
+            // initialize the partial group-by UDAF
+            GenericUDAFEvaluator.Mode partialMode;
+            // adjust mode for external groupby
+            if (mode == GenericUDAFEvaluator.Mode.COMPLETE)
+                partialMode = GenericUDAFEvaluator.Mode.PARTIAL1;
+            else if (mode == GenericUDAFEvaluator.Mode.FINAL)
+                partialMode = GenericUDAFEvaluator.Mode.PARTIAL2;
+            else
+                partialMode = mode;
+            udafPartial = udafsPartial.get(threadId);
+            if (udafPartial == null) {
+                try {
+                    udafPartial = FunctionRegistry.getGenericUDAFEvaluator(genericUDAFName, types, distinct, false);
+                } catch (HiveException e) {
+                    throw new AlgebricksException(e);
+                }
+                udafPartial.init(partialMode, parameterInspectors);
+                udafsPartial.put(threadId, udafPartial);
+            }
+
+            // multiple stage group by, determined by the mode parameter
+            if (outputInspectorPartial == null)
+                outputInspectorPartial = udafPartial.init(partialMode, parameterInspectors);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new AlgebricksException(e);
+        }
+
+        return new AggregationFunctionEvaluator(parameters, types, genericUDAFName, mode, distinct, rowInspector,
+                provider.getDataOutput(), evals, parameterInspectors, cachedParas, lazySer, cachedRowObject,
+                udafPartial, udafComplete, outputInspector, outputInspectorPartial);
+    }
+
+    public String toString() {
+        return "aggregation function expression evaluator factory: " + this.genericUDAFName;
+    }
+}
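
The factory above keys every mutable cache (cloned expressions, evaluators, SerDes, UDAF evaluators) by Thread.currentThread().getId() so that concurrent Hyracks task threads never share Hive evaluator state. A ThreadLocal expresses the same per-thread isolation more directly; a minimal sketch (numParams is a hypothetical field standing in for parametersOrigin.size()):

    // Sketch: per-thread evaluator arrays via ThreadLocal instead of HashMap<Long, ...>.
    private final transient ThreadLocal<ExprNodeEvaluator[]> evalCache =
            new ThreadLocal<ExprNodeEvaluator[]>() {
                @Override
                protected ExprNodeEvaluator[] initialValue() {
                    return new ExprNodeEvaluator[numParams];
                }
            };

One trade-off: the HashMap variant keeps all initialization inside the synchronized createAggregateFunction call, while ThreadLocal defers it to each task thread.
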
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionSerializableFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionSerializableFactory.java
new file mode 100644
index 0000000..54a1155
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/AggregationFunctionSerializableFactory.java
@@ -0,0 +1,366 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+import edu.uci.ics.hivesterix.logical.expression.ExpressionTranslator;
+import edu.uci.ics.hivesterix.runtime.evaluator.AggregatuibFunctionSerializableEvaluator;
+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;
+import edu.uci.ics.hivesterix.serde.lazy.LazyFactory;
+import edu.uci.ics.hivesterix.serde.lazy.LazyObject;
+import edu.uci.ics.hivesterix.serde.lazy.LazySerDe;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopySerializableAggregateFunction;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopySerializableAggregateFunctionFactory;
+
+public class AggregationFunctionSerializableFactory implements ICopySerializableAggregateFunctionFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * list of parameters' serialization
+     */
+    private List<String> parametersSerialization = new ArrayList<String>();
+
+    /**
+     * the name of the udf
+     */
+    private String genericUDAFName;
+
+    /**
+     * aggregation mode
+     */
+    private GenericUDAFEvaluator.Mode mode;
+
+    /**
+     * list of type info
+     */
+    private List<TypeInfo> types = new ArrayList<TypeInfo>();
+
+    /**
+     * distinct or not
+     */
+    private boolean distinct;
+
+    /**
+     * the schema of incoming rows
+     */
+    private Schema rowSchema;
+
+    /**
+     * list of parameters
+     */
+    private transient List<ExprNodeDesc> parametersOrigin;
+
+    /**
+     * row inspector
+     */
+    private transient ObjectInspector rowInspector = null;
+
+    /**
+     * output object inspector
+     */
+    private transient ObjectInspector outputInspector = null;
+
+    /**
+     * output object inspector for the partial aggregation stage
+     */
+    private transient ObjectInspector outputInspectorPartial = null;
+
+    /**
+     * parameter inspectors
+     */
+    private transient ObjectInspector[] parameterInspectors = null;
+
+    /**
+     * expression desc
+     */
+    private transient HashMap<Long, List<ExprNodeDesc>> parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();
+
+    /**
+     * evaluators
+     */
+    private transient HashMap<Long, ExprNodeEvaluator[]> evaluators = new HashMap<Long, ExprNodeEvaluator[]>();
+
+    /**
+     * cached parameter objects
+     */
+    private transient HashMap<Long, Object[]> cachedParameters = new HashMap<Long, Object[]>();
+
+    /**
+     * cached row object: one per thread
+     */
+    private transient HashMap<Long, LazyObject<? extends ObjectInspector>> cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();
+
+    /**
+     * we only use lazy serde to do serialization
+     */
+    private transient HashMap<Long, SerDe> serDe = new HashMap<Long, SerDe>();
+
+    /**
+     * udaf evaluators
+     */
+    private transient HashMap<Long, GenericUDAFEvaluator> udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();
+
+    /**
+     * udaf evaluators
+     */
+    private transient HashMap<Long, GenericUDAFEvaluator> udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();
+
+    /**
+     * aggregation function desc
+     */
+    private transient AggregationDesc aggregator;
+
+    /**
+     * @param expression
+     *            Algebricks function call expression
+     * @param oi
+     *            schema
+     */
+    public AggregationFunctionSerializableFactory(AggregateFunctionCallExpression expression, Schema oi,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+
+        try {
+            aggregator = (AggregationDesc) ExpressionTranslator.getHiveExpression(expression, env);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new AlgebricksException(e.getMessage());
+        }
+        init(aggregator.getParameters(), aggregator.getGenericUDAFName(), aggregator.getMode(),
+                aggregator.getDistinct(), oi);
+    }
+
+    /**
+     * initializes the aggregation function factory
+     * 
+     * @param inputs
+     * @param name
+     * @param udafMode
+     * @param distinct
+     * @param oi
+     */
+    private void init(List<ExprNodeDesc> inputs, String name, GenericUDAFEvaluator.Mode udafMode, boolean distinct,
+            Schema oi) {
+        parametersOrigin = inputs;
+        genericUDAFName = name;
+        mode = udafMode;
+        this.distinct = distinct;
+        rowSchema = oi;
+
+        for (ExprNodeDesc input : inputs) {
+            TypeInfo type = input.getTypeInfo();
+            if (type instanceof StructTypeInfo) {
+                types.add(TypeInfoFactory.doubleTypeInfo);
+            } else
+                types.add(type);
+
+            String s = Utilities.serializeExpression(input);
+            parametersSerialization.add(s);
+        }
+    }
+
+    @Override
+    public synchronized ICopySerializableAggregateFunction createAggregateFunction() throws AlgebricksException {
+        if (parametersOrigin == null) {
+            Configuration config = new Configuration();
+            config.setClassLoader(this.getClass().getClassLoader());
+            /**
+             * guard against Class.forName(...) calls inside Hive code
+             */
+            Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
+
+            parametersOrigin = new ArrayList<ExprNodeDesc>();
+            for (String serialization : parametersSerialization) {
+                parametersOrigin.add(Utilities.deserializeExpression(serialization, config));
+            }
+        }
+
+        /**
+         * exprs
+         */
+        if (parameterExprs == null)
+            parameterExprs = new HashMap<Long, List<ExprNodeDesc>>();
+
+        /**
+         * evaluators
+         */
+        if (evaluators == null)
+            evaluators = new HashMap<Long, ExprNodeEvaluator[]>();
+
+        /**
+         * cached parameter objects
+         */
+        if (cachedParameters == null)
+            cachedParameters = new HashMap<Long, Object[]>();
+
+        /**
+         * cached row object: one per thread
+         */
+        if (cachedRowObjects == null)
+            cachedRowObjects = new HashMap<Long, LazyObject<? extends ObjectInspector>>();
+
+        /**
+         * we only use lazy serde to do serialization
+         */
+        if (serDe == null)
+            serDe = new HashMap<Long, SerDe>();
+
+        /**
+         * UDAF functions
+         */
+        if (udafsComplete == null)
+            udafsComplete = new HashMap<Long, GenericUDAFEvaluator>();
+
+        /**
+         * UDAF functions
+         */
+        if (udafsPartial == null)
+            udafsPartial = new HashMap<Long, GenericUDAFEvaluator>();
+
+        if (parameterInspectors == null)
+            parameterInspectors = new ObjectInspector[parametersOrigin.size()];
+
+        if (rowInspector == null)
+            rowInspector = rowSchema.toObjectInspector();
+
+        // get current thread id
+        long threadId = Thread.currentThread().getId();
+
+        /**
+         * expressions, expressions are thread local
+         */
+        List<ExprNodeDesc> parameters = parameterExprs.get(threadId);
+        if (parameters == null) {
+            parameters = new ArrayList<ExprNodeDesc>();
+            for (ExprNodeDesc parameter : parametersOrigin)
+                parameters.add(parameter.clone());
+            parameterExprs.put(threadId, parameters);
+        }
+
+        /**
+         * cached parameter objects
+         */
+        Object[] cachedParas = cachedParameters.get(threadId);
+        if (cachedParas == null) {
+            cachedParas = new Object[parameters.size()];
+            cachedParameters.put(threadId, cachedParas);
+        }
+
+        /**
+         * cached row object: one per thread
+         */
+        LazyObject<? extends ObjectInspector> cachedRowObject = cachedRowObjects.get(threadId);
+        if (cachedRowObject == null) {
+            cachedRowObject = LazyFactory.createLazyObject(rowInspector);
+            cachedRowObjects.put(threadId, cachedRowObject);
+        }
+
+        /**
+         * we only use lazy serde to do serialization
+         */
+        SerDe lazySer = serDe.get(threadId);
+        if (lazySer == null) {
+            lazySer = new LazySerDe();
+            serDe.put(threadId, lazySer);
+        }
+
+        /**
+         * evaluators
+         */
+        ExprNodeEvaluator[] evals = evaluators.get(threadId);
+        if (evals == null) {
+            evals = new ExprNodeEvaluator[parameters.size()];
+            evaluators.put(threadId, evals);
+        }
+
+        GenericUDAFEvaluator udafPartial;
+        GenericUDAFEvaluator udafComplete;
+
+        // initialize object inspectors
+        try {
+            /**
+             * evaluators, UDFs, and object inspectors are shared within one thread
+             */
+            for (int i = 0; i < evals.length; i++) {
+                if (evals[i] == null) {
+                    evals[i] = ExprNodeEvaluatorFactory.get(parameters.get(i));
+                    if (parameterInspectors[i] == null) {
+                        parameterInspectors[i] = evals[i].initialize(rowInspector);
+                    } else {
+                        evals[i].initialize(rowInspector);
+                    }
+                }
+            }
+
+            udafComplete = udafsComplete.get(threadId);
+            if (udafComplete == null) {
+                try {
+                    udafComplete = FunctionRegistry.getGenericUDAFEvaluator(genericUDAFName, types, distinct, false);
+                } catch (HiveException e) {
+                    throw new AlgebricksException(e);
+                }
+                udafsComplete.put(threadId, udafComplete);
+                udafComplete.init(mode, parameterInspectors);
+            }
+
+            // multiple stage group by, determined by the mode parameter
+            if (outputInspector == null)
+                outputInspector = udafComplete.init(mode, parameterInspectors);
+
+            // initialize the partial group-by UDAF
+            GenericUDAFEvaluator.Mode partialMode;
+            // adjust mode for external groupby
+            if (mode == GenericUDAFEvaluator.Mode.COMPLETE)
+                partialMode = GenericUDAFEvaluator.Mode.PARTIAL1;
+            else if (mode == GenericUDAFEvaluator.Mode.FINAL)
+                partialMode = GenericUDAFEvaluator.Mode.PARTIAL2;
+            else
+                partialMode = mode;
+            udafPartial = udafsPartial.get(threadId);
+            if (udafPartial == null) {
+                try {
+                    udafPartial = FunctionRegistry.getGenericUDAFEvaluator(genericUDAFName, types, distinct, false);
+                } catch (HiveException e) {
+                    throw new AlgebricksException(e);
+                }
+                udafPartial.init(partialMode, parameterInspectors);
+                udafsPartial.put(threadId, udafPartial);
+            }
+
+            // multiple stage group by, determined by the mode parameter
+            if (outputInspectorPartial == null)
+                outputInspectorPartial = udafPartial.init(partialMode, parameterInspectors);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new AlgebricksException(e);
+        }
+
+        return new AggregatuibFunctionSerializableEvaluator(parameters, types, genericUDAFName, mode, distinct,
+                rowInspector, evals, parameterInspectors, cachedParas, lazySer, cachedRowObject, udafPartial,
+                udafComplete, outputInspector, outputInspectorPartial);
+    }
+
+    public String toString() {
+        return "aggregation function expression evaluator factory: " + this.genericUDAFName;
+    }
+
+}
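
Both aggregation factories apply the same mode adjustment for multi-stage (external) group-by: a COMPLETE aggregate is split so its first stage emits partial results, and a FINAL stage may itself feed another merge, so its partial twin consumes partials. The mapping, lifted out as a sketch:

    // Sketch of the partial-mode adjustment used by both factories above.
    static GenericUDAFEvaluator.Mode toPartialMode(GenericUDAFEvaluator.Mode m) {
        switch (m) {
            case COMPLETE:
                return GenericUDAFEvaluator.Mode.PARTIAL1; // raw rows -> partial aggregates
            case FINAL:
                return GenericUDAFEvaluator.Mode.PARTIAL2; // partials -> partials
            default:
                return m; // PARTIAL1/PARTIAL2 already emit partials
        }
    }
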
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ColumnExpressionEvaluatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ColumnExpressionEvaluatorFactory.java
new file mode 100644
index 0000000..6f51bfe
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ColumnExpressionEvaluatorFactory.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;
+
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+
+import edu.uci.ics.hivesterix.logical.expression.ExpressionTranslator;
+import edu.uci.ics.hivesterix.runtime.evaluator.ColumnExpressionEvaluator;
+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+
+public class ColumnExpressionEvaluatorFactory implements ICopyEvaluatorFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    private ExprNodeColumnDesc expr;
+
+    private Schema inputSchema;
+
+    public ColumnExpressionEvaluatorFactory(ILogicalExpression expression, Schema schema, IVariableTypeEnvironment env)
+            throws AlgebricksException {
+        try {
+            expr = (ExprNodeColumnDesc) ExpressionTranslator.getHiveExpression(expression, env);
+        } catch (Exception e) {
+            throw new AlgebricksException(e.getMessage());
+        }
+        inputSchema = schema;
+    }
+
+    public ICopyEvaluator createEvaluator(IDataOutputProvider output) throws AlgebricksException {
+        return new ColumnExpressionEvaluator(expr, inputSchema.toObjectInspector(), output);
+    }
+
+    public String toString() {
+        return "column expression evaluator factory: " + expr.toString();
+    }
+
+}
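
The evaluator factories in this package all follow the same shape: translate the Algebricks expression back into a Hive ExprNodeDesc at construction time, then stamp out per-task ICopyEvaluators that write into a caller-supplied DataOutput. A hedged usage sketch (the byte-buffer provider below is hypothetical, not a Hyracks class):

    // Hypothetical provider that collects evaluator output in a growable buffer.
    final DataOutputStream dos = new DataOutputStream(new ByteArrayOutputStream());
    IDataOutputProvider out = new IDataOutputProvider() {
        @Override
        public DataOutput getDataOutput() {
            return dos;
        }
    };
    ICopyEvaluator eval = factory.createEvaluator(out); // factory: any ICopyEvaluatorFactory above
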
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ConstantExpressionEvaluatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ConstantExpressionEvaluatorFactory.java
new file mode 100644
index 0000000..4ecdb70
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ConstantExpressionEvaluatorFactory.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;
+
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+
+import edu.uci.ics.hivesterix.logical.expression.ExpressionTranslator;
+import edu.uci.ics.hivesterix.runtime.evaluator.ConstantExpressionEvaluator;
+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+
+public class ConstantExpressionEvaluatorFactory implements ICopyEvaluatorFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    private ExprNodeConstantDesc expr;
+
+    private Schema schema;
+
+    public ConstantExpressionEvaluatorFactory(ILogicalExpression expression, Schema inputSchema,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+        try {
+            expr = (ExprNodeConstantDesc) ExpressionTranslator.getHiveExpression(expression, env);
+        } catch (Exception e) {
+            throw new AlgebricksException(e.getMessage());
+        }
+        schema = inputSchema;
+    }
+
+    public ICopyEvaluator createEvaluator(IDataOutputProvider output) throws AlgebricksException {
+        return new ConstantExpressionEvaluator(expr, schema.toObjectInspector(), output);
+    }
+
+    public String toString() {
+        return "constant expression evaluator factory: " + expr.toString();
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/FieldExpressionEvaluatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/FieldExpressionEvaluatorFactory.java
new file mode 100644
index 0000000..ef0c104
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/FieldExpressionEvaluatorFactory.java
@@ -0,0 +1,40 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;
+
+import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
+
+import edu.uci.ics.hivesterix.logical.expression.ExpressionTranslator;
+import edu.uci.ics.hivesterix.runtime.evaluator.FieldExpressionEvaluator;
+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+
+public class FieldExpressionEvaluatorFactory implements ICopyEvaluatorFactory {
+    private static final long serialVersionUID = 1L;
+
+    private ExprNodeFieldDesc expr;
+
+    private Schema inputSchema;
+
+    public FieldExpressionEvaluatorFactory(ILogicalExpression expression, Schema schema, IVariableTypeEnvironment env)
+            throws AlgebricksException {
+        try {
+            expr = (ExprNodeFieldDesc) ExpressionTranslator.getHiveExpression(expression, env);
+        } catch (Exception e) {
+            throw new AlgebricksException(e.getMessage());
+        }
+        inputSchema = schema;
+    }
+
+    public ICopyEvaluator createEvaluator(IDataOutputProvider output) throws AlgebricksException {
+        return new FieldExpressionEvaluator(expr, inputSchema.toObjectInspector(), output);
+    }
+
+    public String toString() {
+        return "field access expression evaluator factory: " + expr.toString();
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/HiveExpressionRuntimeProvider.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/HiveExpressionRuntimeProvider.java
new file mode 100644
index 0000000..c3b4b17
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/HiveExpressionRuntimeProvider.java
@@ -0,0 +1,167 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+
+import edu.uci.ics.hivesterix.logical.expression.ExpressionConstant;
+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.AggregateFunctionFactoryAdapter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.ScalarEvaluatorFactoryAdapter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.UnnestingFunctionFactoryAdapter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IAggregateEvaluatorFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopySerializableAggregateFunctionFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IRunningAggregateEvaluatorFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IUnnestingEvaluatorFactory;
+
+public class HiveExpressionRuntimeProvider implements IExpressionRuntimeProvider {
+
+    public static final IExpressionRuntimeProvider INSTANCE = new HiveExpressionRuntimeProvider();
+
+    @Override
+    public IAggregateEvaluatorFactory createAggregateFunctionFactory(AggregateFunctionCallExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        Schema schema = this.getSchema(inputSchemas[0], env);
+        return new AggregateFunctionFactoryAdapter(new AggregationFunctionFactory(expr, schema, env));
+    }
+
+    @Override
+    public ICopySerializableAggregateFunctionFactory createSerializableAggregateFunctionFactory(
+            AggregateFunctionCallExpression expr, IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas,
+            JobGenContext context) throws AlgebricksException {
+        Schema schema = this.getSchema(inputSchemas[0], env);
+        return new AggregationFunctionSerializableFactory(expr, schema, env);
+    }
+
+    @Override
+    public IRunningAggregateEvaluatorFactory createRunningAggregateFunctionFactory(StatefulFunctionCallExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        return null;
+    }
+
+    @Override
+    public IUnnestingEvaluatorFactory createUnnestingFunctionFactory(UnnestingFunctionCallExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        Schema schema = this.getSchema(inputSchemas[0], env);
+        return new UnnestingFunctionFactoryAdapter(new UnnestingFunctionFactory(expr, schema, env));
+    }
+
+    public IScalarEvaluatorFactory createEvaluatorFactory(ILogicalExpression expr, IVariableTypeEnvironment env,
+            IOperatorSchema[] inputSchemas, JobGenContext context) throws AlgebricksException {
+        switch (expr.getExpressionTag()) {
+            case VARIABLE: {
+                VariableReferenceExpression v = (VariableReferenceExpression) expr;
+                return new ScalarEvaluatorFactoryAdapter(createVariableEvaluatorFactory(v, env, inputSchemas, context));
+            }
+            case CONSTANT: {
+                ConstantExpression c = (ConstantExpression) expr;
+                return new ScalarEvaluatorFactoryAdapter(createConstantEvaluatorFactory(c, env, inputSchemas, context));
+            }
+            case FUNCTION_CALL: {
+                AbstractFunctionCallExpression fun = (AbstractFunctionCallExpression) expr;
+                FunctionIdentifier fid = fun.getFunctionIdentifier();
+
+                if (fid.getName().equals(ExpressionConstant.FIELDACCESS)) {
+                    return new ScalarEvaluatorFactoryAdapter(createFieldExpressionEvaluatorFactory(fun, env,
+                            inputSchemas, context));
+                }
+
+                if (fid.getName().equals(ExpressionConstant.NULL)) {
+                    return new ScalarEvaluatorFactoryAdapter(createNullExpressionEvaluatorFactory(fun, env,
+                            inputSchemas, context));
+                }
+
+                if (fun.getKind() == FunctionKind.SCALAR) {
+                    ScalarFunctionCallExpression scalar = (ScalarFunctionCallExpression) fun;
+                    return new ScalarEvaluatorFactoryAdapter(createScalarFunctionEvaluatorFactory(scalar, env,
+                            inputSchemas, context));
+                } else {
+                    throw new AlgebricksException("Cannot create evaluator for function " + fun + " of kind "
+                            + fun.getKind());
+                }
+            }
+            default: {
+                throw new IllegalStateException();
+            }
+        }
+    }
+
+    private ICopyEvaluatorFactory createVariableEvaluatorFactory(VariableReferenceExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        Schema schema = this.getSchema(inputSchemas[0], env);
+        return new ColumnExpressionEvaluatorFactory(expr, schema, env);
+    }
+
+    private ICopyEvaluatorFactory createScalarFunctionEvaluatorFactory(AbstractFunctionCallExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        List<String> names = new ArrayList<String>();
+        List<TypeInfo> types = new ArrayList<TypeInfo>();
+        for (IOperatorSchema inputSchema : inputSchemas) {
+            Schema schema = this.getSchema(inputSchema, env);
+            names.addAll(schema.getNames());
+            types.addAll(schema.getTypes());
+        }
+        Schema inputSchema = new Schema(names, types);
+        return new ScalarFunctionExpressionEvaluatorFactory(expr, inputSchema, env);
+    }
+
+    private ICopyEvaluatorFactory createFieldExpressionEvaluatorFactory(AbstractFunctionCallExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        Schema schema = this.getSchema(inputSchemas[0], env);
+        return new FieldExpressionEvaluatorFactory(expr, schema, env);
+    }
+
+    private ICopyEvaluatorFactory createNullExpressionEvaluatorFactory(AbstractFunctionCallExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        Schema schema = this.getSchema(inputSchemas[0], env);
+        return new NullExpressionEvaluatorFactory(expr, schema, env);
+    }
+
+    private ICopyEvaluatorFactory createConstantEvaluatorFactory(ConstantExpression expr, IVariableTypeEnvironment env,
+            IOperatorSchema[] inputSchemas, JobGenContext context) throws AlgebricksException {
+        Schema schema = this.getSchema(inputSchemas[0], env);
+        return new ConstantExpressionEvaluatorFactory(expr, schema, env);
+    }
+
+    private Schema getSchema(IOperatorSchema inputSchema, IVariableTypeEnvironment env) throws AlgebricksException {
+        List<String> names = new ArrayList<String>();
+        List<TypeInfo> types = new ArrayList<TypeInfo>();
+        Iterator<LogicalVariable> variables = inputSchema.iterator();
+        while (variables.hasNext()) {
+            LogicalVariable var = variables.next();
+            names.add(var.toString());
+            types.add((TypeInfo) env.getVarType(var));
+        }
+
+        Schema schema = new Schema(names, types);
+        return schema;
+    }
+
+}
\ No newline at end of file
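
The provider above is the single entry point the Algebricks job generator goes through; everything else in this package is reached via its dispatch. A hedged call-site sketch (the expression, type environment, schemas, and context all come from the compiler and are assumed here):

    // Hypothetical call site inside job generation.
    HiveExpressionRuntimeProvider provider = new HiveExpressionRuntimeProvider();
    IScalarEvaluatorFactory f = provider.createEvaluatorFactory(expr, typeEnv, inputSchemas, context);
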
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/NullExpressionEvaluatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/NullExpressionEvaluatorFactory.java
new file mode 100644
index 0000000..f37b825
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/NullExpressionEvaluatorFactory.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;
+
+import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
+
+import edu.uci.ics.hivesterix.logical.expression.ExpressionTranslator;
+import edu.uci.ics.hivesterix.runtime.evaluator.NullExpressionEvaluator;
+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+
+public class NullExpressionEvaluatorFactory implements ICopyEvaluatorFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    private ExprNodeNullDesc expr;
+
+    private Schema schema;
+
+    public NullExpressionEvaluatorFactory(ILogicalExpression expression, Schema inputSchema,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+        try {
+            expr = (ExprNodeNullDesc) ExpressionTranslator.getHiveExpression(expression, env);
+        } catch (Exception e) {
+            throw new AlgebricksException(e.getMessage());
+        }
+        schema = inputSchema;
+    }
+
+    public ICopyEvaluator createEvaluator(IDataOutputProvider output) throws AlgebricksException {
+        return new NullExpressionEvaluator(expr, schema.toObjectInspector(), output);
+    }
+
+    public String toString() {
+        return "null expression evaluator factory: " + expr.toString();
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ScalarFunctionExpressionEvaluatorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ScalarFunctionExpressionEvaluatorFactory.java
new file mode 100644
index 0000000..cbac10a
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/ScalarFunctionExpressionEvaluatorFactory.java
@@ -0,0 +1,69 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+
+import edu.uci.ics.hivesterix.logical.expression.ExpressionTranslator;
+import edu.uci.ics.hivesterix.runtime.evaluator.FunctionExpressionEvaluator;
+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+
+public class ScalarFunctionExpressionEvaluatorFactory implements ICopyEvaluatorFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    private transient ExprNodeGenericFuncDesc expr;
+
+    private String exprSerialization;
+
+    private Schema inputSchema;
+
+    private transient Configuration config;
+
+    public ScalarFunctionExpressionEvaluatorFactory(ILogicalExpression expression, Schema schema,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+        try {
+            expr = (ExprNodeGenericFuncDesc) ExpressionTranslator.getHiveExpression(expression, env);
+
+            exprSerialization = Utilities.serializeExpression(expr);
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new AlgebricksException(e.getMessage());
+        }
+        inputSchema = schema;
+    }
+
+    public synchronized ICopyEvaluator createEvaluator(IDataOutputProvider output) throws AlgebricksException {
+        if (expr == null) {
+            configClassLoader();
+            expr = (ExprNodeGenericFuncDesc) Utilities.deserializeExpression(exprSerialization, config);
+        }
+
+        ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) expr.clone();
+        return new FunctionExpressionEvaluator(funcDesc, inputSchema.toObjectInspector(), output);
+    }
+
+    private void configClassLoader() {
+        config = new Configuration();
+        ClassLoader loader = this.getClass().getClassLoader();
+        config.setClassLoader(loader);
+        Thread.currentThread().setContextClassLoader(loader);
+    }
+
+    public String toString() {
+        if (expr == null) {
+            configClassLoader();
+            expr = (ExprNodeGenericFuncDesc) Utilities.deserializeExpression(exprSerialization, config);
+        }
+
+        return "function expression evaluator factory: " + expr.getExprString();
+    }
+
+}
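
The factory above keeps the function descriptor transient and ships it as a string because Hive expression trees are not dependably Java-serializable across the cluster; each worker lazily rebuilds the tree under a Configuration whose classloader can resolve the UDF classes. The round-trip, condensed from the code above:

    // Compiler side: flatten the expression tree to a string.
    String wire = Utilities.serializeExpression(expr);

    // Worker side: restore it with a classloader-aware Configuration.
    Configuration conf = new Configuration();
    conf.setClassLoader(ScalarFunctionExpressionEvaluatorFactory.class.getClassLoader());
    ExprNodeGenericFuncDesc back = (ExprNodeGenericFuncDesc) Utilities.deserializeExpression(wire, conf);
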
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/UnnestingFunctionFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/UnnestingFunctionFactory.java
new file mode 100644
index 0000000..3b22513
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/evaluator/UnnestingFunctionFactory.java
@@ -0,0 +1,40 @@
+package edu.uci.ics.hivesterix.runtime.factory.evaluator;
+
+import org.apache.hadoop.hive.ql.plan.UDTFDesc;
+
+import edu.uci.ics.hivesterix.logical.expression.ExpressionTranslator;
+import edu.uci.ics.hivesterix.runtime.evaluator.UDTFFunctionEvaluator;
+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyUnnestingFunction;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyUnnestingFunctionFactory;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+
+public class UnnestingFunctionFactory implements ICopyUnnestingFunctionFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    private UDTFDesc expr;
+
+    private Schema inputSchema;
+
+    private int[] columns; // projection columns; never assigned here, so null reaches the evaluator
+
+    public UnnestingFunctionFactory(ILogicalExpression expression, Schema schema, IVariableTypeEnvironment env)
+            throws AlgebricksException {
+        try {
+            expr = (UDTFDesc) ExpressionTranslator.getHiveExpression(expression, env);
+        } catch (Exception e) {
+            throw new AlgebricksException(e.getMessage());
+        }
+        inputSchema = schema;
+    }
+
+    @Override
+    public ICopyUnnestingFunction createUnnestingFunction(IDataOutputProvider provider) throws AlgebricksException {
+        return new UDTFFunctionEvaluator(expr, inputSchema, columns, provider.getDataOutput());
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveDoubleBinaryHashFunctionFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveDoubleBinaryHashFunctionFactory.java
new file mode 100644
index 0000000..b636009
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveDoubleBinaryHashFunctionFactory.java
@@ -0,0 +1,28 @@
+package edu.uci.ics.hivesterix.runtime.factory.hashfunction;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+
+public class HiveDoubleBinaryHashFunctionFactory implements IBinaryHashFunctionFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static HiveDoubleBinaryHashFunctionFactory INSTANCE = new HiveDoubleBinaryHashFunctionFactory();
+
+    private HiveDoubleBinaryHashFunctionFactory() {
+    }
+
+    @Override
+    public IBinaryHashFunction createBinaryHashFunction() {
+        return new IBinaryHashFunction() {
+            private Double value;
+
+            @Override
+            public int hash(byte[] bytes, int offset, int length) {
+                value = Double.longBitsToDouble(LazyUtils.byteArrayToLong(bytes, offset));
+                return value.hashCode();
+            }
+        };
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveIntegerBinaryHashFunctionFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveIntegerBinaryHashFunctionFactory.java
new file mode 100644
index 0000000..90e6ce4
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveIntegerBinaryHashFunctionFactory.java
@@ -0,0 +1,34 @@
+package edu.uci.ics.hivesterix.runtime.factory.hashfunction;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+
+public class HiveIntegerBinaryHashFunctionFactory implements IBinaryHashFunctionFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static IBinaryHashFunctionFactory INSTANCE = new HiveIntegerBinaryHashFunctionFactory();
+
+    private HiveIntegerBinaryHashFunctionFactory() {
+    }
+
+    @Override
+    public IBinaryHashFunction createBinaryHashFunction() {
+
+        return new IBinaryHashFunction() {
+            private VInt value = new VInt();
+
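+            // decode the VInt at the field start and use the decoded integer itself as the hash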
+            @Override
+            public int hash(byte[] bytes, int offset, int length) {
+                LazyUtils.readVInt(bytes, offset, value);
+                if (value.length != length)
+                    throw new IllegalArgumentException("length mismatch in int hash function actual: " + length
+                            + " expected " + value.length);
+                return value.value;
+            }
+        };
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveLongBinaryHashFunctionFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveLongBinaryHashFunctionFactory.java
new file mode 100644
index 0000000..1b61f67
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveLongBinaryHashFunctionFactory.java
@@ -0,0 +1,30 @@
+package edu.uci.ics.hivesterix.runtime.factory.hashfunction;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VLong;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+
+public class HiveLongBinaryHashFunctionFactory implements IBinaryHashFunctionFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static IBinaryHashFunctionFactory INSTANCE = new HiveLongBinaryHashFunctionFactory();
+
+    private HiveLongBinaryHashFunctionFactory() {
+    }
+
+    @Override
+    public IBinaryHashFunction createBinaryHashFunction() {
+
+        return new IBinaryHashFunction() {
+            private VLong value = new VLong();
+
+            @Override
+            public int hash(byte[] bytes, int offset, int length) {
+                LazyUtils.readVLong(bytes, offset, value);
+                return (int) value.value;
+            }
+        };
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveRawBinaryHashFunctionFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveRawBinaryHashFunctionFactory.java
new file mode 100644
index 0000000..f2b7b44
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveRawBinaryHashFunctionFactory.java
@@ -0,0 +1,32 @@
+package edu.uci.ics.hivesterix.runtime.factory.hashfunction;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+
+public class HiveRawBinaryHashFunctionFactory implements IBinaryHashFunctionFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static IBinaryHashFunctionFactory INSTANCE = new HiveRawBinaryHashFunctionFactory();
+
+    private HiveRawBinaryHashFunctionFactory() {
+
+    }
+
+    @Override
+    public IBinaryHashFunction createBinaryHashFunction() {
+
+        return new IBinaryHashFunction() {
+
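+            // 31-based polynomial hash over the raw bytes, analogous to Arrays.hashCode(byte[])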
+            @Override
+            public int hash(byte[] bytes, int offset, int length) {
+                int value = 1;
+                int end = offset + length;
+                for (int i = offset; i < end; i++)
+                    value = value * 31 + (int) bytes[i];
+                return value;
+            }
+        };
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveStingBinaryHashFunctionFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveStingBinaryHashFunctionFactory.java
new file mode 100644
index 0000000..a9cf6fd
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/HiveStingBinaryHashFunctionFactory.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.hivesterix.runtime.factory.hashfunction;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+
+public class HiveStingBinaryHashFunctionFactory implements IBinaryHashFunctionFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static HiveStingBinaryHashFunctionFactory INSTANCE = new HiveStingBinaryHashFunctionFactory();
+
+    private HiveStingBinaryHashFunctionFactory() {
+    }
+
+    @Override
+    public IBinaryHashFunction createBinaryHashFunction() {
+        // skip the VInt length prefix and hash the UTF-8 payload that follows
+        return new IBinaryHashFunction() {
+            private VInt len = new VInt();
+
+            @Override
+            public int hash(byte[] bytes, int offset, int length) {
+                LazyUtils.readVInt(bytes, offset, len);
+                if (len.value + len.length != length)
+                    throw new IllegalStateException("parse string: length mismatch, expected "
+                            + (len.value + len.length) + " but got " + length);
+                return hashBytes(bytes, offset + len.length, length - len.length);
+            }
+
+            public int hashBytes(byte[] bytes, int offset, int length) {
+                int value = 1;
+                int end = offset + length;
+                for (int i = offset; i < end; i++)
+                    value = value * 31 + (int) bytes[i];
+                return value;
+            }
+        };
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/MurmurHash3BinaryHashFunctionFamily.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/MurmurHash3BinaryHashFunctionFamily.java
new file mode 100644
index 0000000..760a614
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/MurmurHash3BinaryHashFunctionFamily.java
@@ -0,0 +1,64 @@
+package edu.uci.ics.hivesterix.runtime.factory.hashfunction;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+
+public class MurmurHash3BinaryHashFunctionFamily implements IBinaryHashFunctionFamily {
+
+    public static final IBinaryHashFunctionFamily INSTANCE = new MurmurHash3BinaryHashFunctionFamily();
+
+    private static final long serialVersionUID = 1L;
+
+    private MurmurHash3BinaryHashFunctionFamily() {
+    }
+
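+    // block-mix constants (C1-C4) and finalization constants (C5, C6) of MurmurHash3 x86_32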
+    private static final int C1 = 0xcc9e2d51;
+    private static final int C2 = 0x1b873593;
+    private static final int C3 = 5;
+    private static final int C4 = 0xe6546b64;
+    private static final int C5 = 0x85ebca6b;
+    private static final int C6 = 0xc2b2ae35;
+
+    @Override
+    public IBinaryHashFunction createBinaryHashFunction(final int seed) {
+        return new IBinaryHashFunction() {
+            @Override
+            public int hash(byte[] bytes, int offset, int length) {
+                int h = seed;
+                int p = offset;
+                int remain = length;
+                while (remain >= 4) {
+                    int k = (bytes[p] & 0xff) | ((bytes[p + 1] & 0xff) << 8) | ((bytes[p + 2] & 0xff) << 16)
+                            | ((bytes[p + 3] & 0xff) << 24);
+                    k *= C1;
+                    k = Integer.rotateLeft(k, 15);
+                    k *= C2;
+                    h ^= k;
+                    h = Integer.rotateLeft(h, 13);
+                    h = h * C3 + C4;
+                    p += 4;
+                    remain -= 4;
+                }
+                if (remain > 0) {
+                    int k = 0;
+                    for (int i = 0; remain > 0; i += 8) {
+                        k ^= (bytes[p++] & 0xff) << i;
+                        remain--;
+                    }
+                    k *= C1;
+                    k = Integer.rotateLeft(k, 15);
+                    k *= C2;
+                    h ^= k;
+                }
+                h ^= length;
+                h ^= (h >>> 16);
+                h *= C5;
+                h ^= (h >>> 13);
+                h *= C6;
+                h ^= (h >>> 16);
+                return h;
+            }
+        };
+    }
+}
\ No newline at end of file
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleAscNormalizedKeyComputerFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleAscNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..6ac012f
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleAscNormalizedKeyComputerFactory.java
@@ -0,0 +1,25 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+public class HiveDoubleAscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+
+        return new INormalizedKeyComputer() {
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                int header = LazyUtils.byteArrayToInt(bytes, start);
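+                // shift the signed header into unsigned space so keys sort in ascending numeric order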
+                long unsignedValue = (long) header;
+                return (int) ((unsignedValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
+            }
+        };
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleDescNormalizedKeyComputerFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleDescNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..3044109
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveDoubleDescNormalizedKeyComputerFactory.java
@@ -0,0 +1,24 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+public class HiveDoubleDescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+    private static final long serialVersionUID = 1L;
+    private final INormalizedKeyComputerFactory ascNormalizedKeyComputerFactory = new HiveDoubleAscNormalizedKeyComputerFactory();
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
+            private INormalizedKeyComputer nmkComputer = ascNormalizedKeyComputerFactory.createNormalizedKeyComputer();
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                int nk = nmkComputer.normalize(bytes, start, length);
+                return (int) ((long) Integer.MAX_VALUE - (long) (nk - Integer.MIN_VALUE));
+            }
+
+        };
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerAscNormalizedKeyComputerFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerAscNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..a1d4d48
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerAscNormalizedKeyComputerFactory.java
@@ -0,0 +1,29 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+public class HiveIntegerAscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+
+        return new INormalizedKeyComputer() {
+            private VInt vint = new VInt();
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                LazyUtils.readVInt(bytes, start, vint);
+                if (vint.length != length)
+                    throw new IllegalArgumentException("length mismatch in int comparator function actual: "
+                            + vint.length + " expected " + length);
+                long unsignedValue = (long) vint.value;
+                return (int) ((unsignedValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
+            }
+        };
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerDescNormalizedKeyComputerFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerDescNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..b8a30a8
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveIntegerDescNormalizedKeyComputerFactory.java
@@ -0,0 +1,29 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+public class HiveIntegerDescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+
+        return new INormalizedKeyComputer() {
+            private VInt vint = new VInt();
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                LazyUtils.readVInt(bytes, start, vint);
+                if (vint.length != length)
+                    throw new IllegalArgumentException("length mismatch in int comparator function actual: "
+                            + vint.length + " expected " + length);
+                long unsignedValue = (long) vint.value;
+                return (int) ((long) 0xffffffff - unsignedValue);
+            }
+        };
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongAscNormalizedKeyComputerFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongAscNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..a893d19
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongAscNormalizedKeyComputerFactory.java
@@ -0,0 +1,64 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VLong;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+public class HiveLongAscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+
+        return new INormalizedKeyComputer() {
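+            // the two high bits tag the value's range so keys order negative longs < non-negative ints < large positive longs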
+            private static final int POSITIVE_LONG_MASK = (3 << 30);
+            private static final int NON_NEGATIVE_INT_MASK = (2 << 30);
+            private static final int NEGATIVE_LONG_MASK = (0 << 30);
+            private VLong vlong = new VLong();
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                LazyUtils.readVLong(bytes, start, vlong);
+                if (vlong.length != length)
+                    throw new IllegalArgumentException("length mismatch in int comparator function actual: "
+                            + vlong.length + " expected " + length);
+                long value = (long) vlong.value;
+                int highValue = (int) (value >> 32);
+                if (highValue > 0) {
+                    /**
+                     * larger than Integer.MAX
+                     */
+                    int highNmk = getKey(highValue);
+                    highNmk >>= 2;
+                    highNmk |= POSITIVE_LONG_MASK;
+                    return highNmk;
+                } else if (highValue == 0) {
+                    /**
+                     * smaller than Integer.MAX but >=0
+                     */
+                    int lowNmk = (int) value;
+                    lowNmk >>= 2;
+                    lowNmk |= NON_NEGATIVE_INT_MASK;
+                    return lowNmk;
+                } else {
+                    /**
+                     * less than 0; TODO: have not optimized for that
+                     */
+                    int highNmk = getKey(highValue);
+                    highNmk >>= 2;
+                    highNmk |= NEGATIVE_LONG_MASK;
+                    return highNmk;
+                }
+            }
+
+            private int getKey(int value) {
+                long unsignedFirstValue = (long) value;
+                int nmk = (int) ((unsignedFirstValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
+                return nmk;
+            }
+        };
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongDescNormalizedKeyComputerFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongDescNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..cc5661b
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveLongDescNormalizedKeyComputerFactory.java
@@ -0,0 +1,25 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+public class HiveLongDescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+    private static final long serialVersionUID = 1L;
+    private final INormalizedKeyComputerFactory ascNormalizedKeyComputerFactory = new HiveLongAscNormalizedKeyComputerFactory();
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
+            private INormalizedKeyComputer nmkComputer = ascNormalizedKeyComputerFactory.createNormalizedKeyComputer();
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                int nk = nmkComputer.normalize(bytes, start, length);
+                return (int) ((long) Integer.MAX_VALUE - (long) (nk - Integer.MIN_VALUE));
+            }
+
+        };
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringAscNormalizedKeyComputerFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringAscNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..d0429d6
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringAscNormalizedKeyComputerFactory.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+
+public class HiveStringAscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+
+        return new INormalizedKeyComputer() {
+            private VInt len = new VInt();
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                LazyUtils.readVInt(bytes, start, len);
+
+                if (len.value + len.length != length)
+                    throw new IllegalStateException("parse string: length mismatch, expected "
+                            + (len.value + len.length) + " but got " + length);
+                int nk = 0;
+                int offset = start + len.length;
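+                // pack the first two UTF-8 characters, high-order character first, into the 32-bit key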
+                for (int i = 0; i < 2; ++i) {
+                    nk <<= 16;
+                    if (i < len.value) {
+                        char character = UTF8StringPointable.charAt(bytes, offset);
+                        nk += ((int) character) & 0xffff;
+                        offset += UTF8StringPointable.charSize(bytes, offset);
+                    }
+                }
+                return nk;
+            }
+        };
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringDescNormalizedKeyComputerFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringDescNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..15b2d27
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/normalize/HiveStringDescNormalizedKeyComputerFactory.java
@@ -0,0 +1,37 @@
+package edu.uci.ics.hivesterix.runtime.factory.normalize;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+
+public class HiveStringDescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
+            private VInt len = new VInt();
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                LazyUtils.readVInt(bytes, start, len);
+                if (len.value + len.length != length)
+                    throw new IllegalStateException("parse string: length mismatch, expected "
+                            + (len.value + len.length) + " but got " + length);
+                int nk = 0;
+                int offset = start + len.length;
+                for (int i = 0; i < 2; ++i) {
+                    nk <<= 16;
+                    if (i < len.value) {
+                        nk += ((int) UTF8StringPointable.charAt(bytes, offset)) & 0xffff;
+                        offset += UTF8StringPointable.charSize(bytes, offset);
+                    }
+                }
+                return (int) ((long) 0xffffffff - (long) nk);
+            }
+        };
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/nullwriter/HiveNullWriterFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/nullwriter/HiveNullWriterFactory.java
new file mode 100644
index 0000000..590bd61
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/nullwriter/HiveNullWriterFactory.java
@@ -0,0 +1,28 @@
+package edu.uci.ics.hivesterix.runtime.factory.nullwriter;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class HiveNullWriterFactory implements INullWriterFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    public static HiveNullWriterFactory INSTANCE = new HiveNullWriterFactory();
+
+    @Override
+    public INullWriter createNullWriter() {
+        return new HiveNullWriter();
+    }
+}
+
+class HiveNullWriter implements INullWriter {
+
+    @Override
+    public void writeNull(DataOutput out) throws HyracksDataException {
+        // do nothing
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspector.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspector.java
new file mode 100644
index 0000000..677e20e
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspector.java
@@ -0,0 +1,19 @@
+package edu.uci.ics.hivesterix.runtime.inspector;
+
+import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspector;
+
+public class HiveBinaryBooleanInspector implements IBinaryBooleanInspector {
+
+    HiveBinaryBooleanInspector() {
+    }
+
+    @Override
+    public boolean getBooleanValue(byte[] bytes, int offset, int length) {
+        if (length == 0)
+            return false;
+        if (length != 1)
+            throw new IllegalStateException("boolean field error: with length " + length);
+        return bytes[offset] == 1;
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspectorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspectorFactory.java
new file mode 100644
index 0000000..22a6065
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryBooleanInspectorFactory.java
@@ -0,0 +1,20 @@
+package edu.uci.ics.hivesterix.runtime.inspector;
+
+import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspector;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspectorFactory;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+public class HiveBinaryBooleanInspectorFactory implements IBinaryBooleanInspectorFactory {
+    private static final long serialVersionUID = 1L;
+    public static HiveBinaryBooleanInspectorFactory INSTANCE = new HiveBinaryBooleanInspectorFactory();
+
+    private HiveBinaryBooleanInspectorFactory() {
+
+    }
+
+    @Override
+    public IBinaryBooleanInspector createBinaryBooleanInspector(IHyracksTaskContext arg0) {
+        return new HiveBinaryBooleanInspector();
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspector.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspector.java
new file mode 100644
index 0000000..555afee
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspector.java
@@ -0,0 +1,22 @@
+package edu.uci.ics.hivesterix.runtime.inspector;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspector;
+
+public class HiveBinaryIntegerInspector implements IBinaryIntegerInspector {
+    private VInt value = new VInt();
+
+    HiveBinaryIntegerInspector() {
+    }
+
+    @Override
+    public int getIntegerValue(byte[] bytes, int offset, int length) {
+        LazyUtils.readVInt(bytes, offset, value);
+        if (value.length != length)
+            throw new IllegalArgumentException("length mismatch in int inspector actual: " + length + " expected "
+                    + value.length);
+        return value.value;
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspectorFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspectorFactory.java
new file mode 100644
index 0000000..bb93a60
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/inspector/HiveBinaryIntegerInspectorFactory.java
@@ -0,0 +1,20 @@
+package edu.uci.ics.hivesterix.runtime.inspector;
+
+import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspector;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+public class HiveBinaryIntegerInspectorFactory implements IBinaryIntegerInspectorFactory {
+    private static final long serialVersionUID = 1L;
+    public static HiveBinaryIntegerInspectorFactory INSTANCE = new HiveBinaryIntegerInspectorFactory();
+
+    private HiveBinaryIntegerInspectorFactory() {
+
+    }
+
+    @Override
+    public IBinaryIntegerInspector createBinaryIntegerInspector(IHyracksTaskContext arg0) {
+        return new HiveBinaryIntegerInspector();
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveConnectorPolicyAssignmentPolicy.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveConnectorPolicyAssignmentPolicy.java
new file mode 100644
index 0000000..cfceb26
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveConnectorPolicyAssignmentPolicy.java
@@ -0,0 +1,68 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;
+
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicyAssignmentPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.PipeliningConnectorPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.SendSideMaterializedBlockingConnectorPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.SendSideMaterializedPipeliningConnectorPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.SendSideMaterializedReceiveSideMaterializedBlockingConnectorPolicy;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
+
+public class HiveConnectorPolicyAssignmentPolicy implements IConnectorPolicyAssignmentPolicy {
+    public enum Policy {
+        PIPELINING,
+        SEND_SIDE_MAT_PIPELINING,
+        SEND_SIDE_MAT_BLOCKING,
+        SEND_SIDE_MAT_RECEIVE_SIDE_MAT_BLOCKING;
+    };
+
+    private static final long serialVersionUID = 1L;
+
+    private final IConnectorPolicy pipeliningPolicy = new PipeliningConnectorPolicy();
+    private final IConnectorPolicy sendSideMatPipeliningPolicy = new SendSideMaterializedPipeliningConnectorPolicy();
+    private final IConnectorPolicy sendSideMatBlockingPolicy = new SendSideMaterializedBlockingConnectorPolicy();
+    private final IConnectorPolicy sendSideMatReceiveSideMatBlockingPolicy = new SendSideMaterializedReceiveSideMaterializedBlockingConnectorPolicy();
+    private final Policy policy;
+
+    public HiveConnectorPolicyAssignmentPolicy(Policy policy) {
+        this.policy = policy;
+    }
+
+    @Override
+    public IConnectorPolicy getConnectorPolicyAssignment(IConnectorDescriptor c, int nProducers, int nConsumers,
+            int[] fanouts) {
+        if (c instanceof MToNPartitioningMergingConnectorDescriptor) {
+            // merging connectors can deadlock under pure pipelining, so always materialize on the send side
+            switch (policy) {
+                case PIPELINING:
+                case SEND_SIDE_MAT_PIPELINING:
+                    return sendSideMatPipeliningPolicy;
+                case SEND_SIDE_MAT_BLOCKING:
+                    return sendSideMatBlockingPolicy;
+                case SEND_SIDE_MAT_RECEIVE_SIDE_MAT_BLOCKING:
+                    return sendSideMatReceiveSideMatBlockingPolicy;
+                default:
+                    return sendSideMatPipeliningPolicy;
+            }
+        } else if (c instanceof MToNPartitioningConnectorDescriptor) {
+            // support different repartitioning policies
+            switch (policy) {
+                case PIPELINING:
+                    return pipeliningPolicy;
+                case SEND_SIDE_MAT_PIPELINING:
+                    return sendSideMatPipeliningPolicy;
+                case SEND_SIDE_MAT_BLOCKING:
+                    return sendSideMatBlockingPolicy;
+                case SEND_SIDE_MAT_RECEIVE_SIDE_MAT_BLOCKING:
+                    return sendSideMatReceiveSideMatBlockingPolicy;
+                default:
+                    return pipeliningPolicy;
+            }
+        } else {
+            // pipelining for other connectors
+            return pipeliningPolicy;
+        }
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSink.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSink.java
new file mode 100644
index 0000000..ccc2e6c
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSink.java
@@ -0,0 +1,32 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;
+
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSink;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.RandomPartitioningProperty;
+
+public class HiveDataSink implements IDataSink {
+
+    private Object[] schema;
+
+    private Object fsOperator;
+
+    public HiveDataSink(Object sink, Object[] sourceSchema) {
+        schema = sourceSchema;
+        fsOperator = sink;
+    }
+
+    @Override
+    public Object getId() {
+        return fsOperator;
+    }
+
+    @Override
+    public Object[] getSchemaTypes() {
+        return schema;
+    }
+
+    public IPartitioningProperty getPartitioningProperty() {
+        return new RandomPartitioningProperty(new HiveDomain());
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSource.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSource.java
new file mode 100644
index 0000000..67b743b
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSource.java
@@ -0,0 +1,47 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourcePropertiesProvider;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
+
+public class HiveDataSource<P> implements IDataSource<P> {
+
+    private P source;
+
+    private Object[] schema;
+
+    public HiveDataSource(P dataSource, Object[] sourceSchema) {
+        source = dataSource;
+        schema = sourceSchema;
+    }
+
+    @Override
+    public P getId() {
+        return source;
+    }
+
+    @Override
+    public Object[] getSchemaTypes() {
+        return schema;
+    }
+
+    @Override
+    public void computeFDs(List<LogicalVariable> scanVariables, List<FunctionalDependency> fdList) {
+    }
+
+    @Override
+    public IDataSourcePropertiesProvider getPropertiesProvider() {
+        return new HiveDataSourcePartitioningProvider();
+    }
+
+    @Override
+    public String toString() {
+        PartitionDesc desc = (PartitionDesc) source;
+        return desc.getTableName();
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSourcePartitioningProvider.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSourcePartitioningProvider.java
new file mode 100644
index 0000000..bb9c4ce
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDataSourcePartitioningProvider.java
@@ -0,0 +1,23 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;
+
+import java.util.LinkedList;
+import java.util.List;
+
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourcePropertiesProvider;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.RandomPartitioningProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
+
+public class HiveDataSourcePartitioningProvider implements IDataSourcePropertiesProvider {
+
+    @Override
+    public IPhysicalPropertiesVector computePropertiesVector(List<LogicalVariable> scanVariables) {
+        IPartitioningProperty property = new RandomPartitioningProperty(new HiveDomain());
+        IPhysicalPropertiesVector vector = new StructuralPropertiesVector(property,
+                new LinkedList<ILocalStructuralProperty>());
+        return vector;
+    }
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDomain.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDomain.java
new file mode 100644
index 0000000..8b1d3b5
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveDomain.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;
+
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.INodeDomain;
+
+public class HiveDomain implements INodeDomain {
+
+    @Override
+    public boolean sameAs(INodeDomain domain) {
+        return true;
+    }
+
+    @Override
+    public Integer cardinality() {
+        return 0;
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveMetaDataProvider.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveMetaDataProvider.java
new file mode 100644
index 0000000..daf6a7f
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveMetaDataProvider.java
@@ -0,0 +1,138 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;
+
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+
+import edu.uci.ics.hivesterix.logical.expression.HiveFunctionInfo;
+import edu.uci.ics.hivesterix.runtime.jobgen.HiveScanRuntimeGenerator;
+import edu.uci.ics.hivesterix.runtime.jobgen.HiveWriteRuntimeGenerator;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSink;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+@SuppressWarnings("rawtypes")
+public class HiveMetaDataProvider<S, T> implements IMetadataProvider<S, T> {
+
+    private Operator fileSink;
+    private Schema outputSchema;
+    private HashMap<S, IDataSource<S>> dataSourceMap;
+
+    public HiveMetaDataProvider(Operator fsOp, Schema oi, HashMap<S, IDataSource<S>> map) {
+        fileSink = fsOp;
+        outputSchema = oi;
+        dataSourceMap = map;
+    }
+
+    @Override
+    public IDataSourceIndex<T, S> findDataSourceIndex(T indexId, S dataSourceId) throws AlgebricksException {
+        return null;
+    }
+
+    @Override
+    public IDataSource<S> findDataSource(S id) throws AlgebricksException {
+        return dataSourceMap.get(id);
+    }
+
+    @Override
+    public boolean scannerOperatorIsLeaf(IDataSource<S> dataSource) {
+        return true;
+    }
+
+    @Override
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getScannerRuntime(IDataSource<S> dataSource,
+            List<LogicalVariable> scanVariables, List<LogicalVariable> projectVariables, boolean projectPushed,
+            IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv, JobGenContext context, JobSpecification jobSpec)
+            throws AlgebricksException {
+
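+        // delegate to HiveScanRuntimeGenerator, which wraps the Hive PartitionDesc in an HDFS scan operator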
+        S desc = dataSource.getId();
+        HiveScanRuntimeGenerator generator = new HiveScanRuntimeGenerator((PartitionDesc) desc);
+        return generator.getRuntimeOperatorAndConstraint(dataSource, scanVariables, projectVariables, projectPushed,
+                context, jobSpec);
+    }
+
+    @Override
+    public Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriteFileRuntime(IDataSink sink,
+            int[] printColumns, IPrinterFactory[] printerFactories, RecordDescriptor inputDesc) {
+
+        HiveWriteRuntimeGenerator generator = new HiveWriteRuntimeGenerator((FileSinkOperator) fileSink, outputSchema);
+        return generator.getWriterRuntime(inputDesc);
+    }
+
+    @Override
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getDeleteRuntime(IDataSource<S> arg0,
+            IOperatorSchema arg1, List<LogicalVariable> arg2, LogicalVariable arg3, RecordDescriptor arg4,
+            JobGenContext arg5, JobSpecification arg6) throws AlgebricksException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInsertRuntime(IDataSource<S> arg0,
+            IOperatorSchema arg1, List<LogicalVariable> arg2, LogicalVariable arg3, RecordDescriptor arg4,
+            JobGenContext arg5, JobSpecification arg6) throws AlgebricksException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getResultHandleRuntime(IDataSink sink,
+            int[] printColumns, IPrinterFactory[] printerFactories, RecordDescriptor inputDesc, boolean ordered,
+            JobSpecification spec) throws AlgebricksException {
+        return null;
+    }
+
+    @Override
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getWriteResultRuntime(IDataSource<S> arg0,
+            IOperatorSchema arg1, List<LogicalVariable> arg2, LogicalVariable arg3, JobGenContext arg4,
+            JobSpecification arg5) throws AlgebricksException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public IFunctionInfo lookupFunction(FunctionIdentifier arg0) {
+        return new HiveFunctionInfo(arg0, null);
+    }
+
+    @Override
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexInsertRuntime(
+            IDataSourceIndex<T, S> dataSource, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas,
+            IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
+            ILogicalExpression filterExpr, RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec)
+            throws AlgebricksException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexDeleteRuntime(
+            IDataSourceIndex<T, S> dataSource, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas,
+            IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
+            ILogicalExpression filterExpr, RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec)
+            throws AlgebricksException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveOperatorSchema.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveOperatorSchema.java
new file mode 100644
index 0000000..cdb0e95
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveOperatorSchema.java
@@ -0,0 +1,84 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+
+public class HiveOperatorSchema implements IOperatorSchema {
+
+    private final Map<LogicalVariable, Integer> varMap;
+
+    private final List<LogicalVariable> varList;
+
+    public HiveOperatorSchema() {
+        varMap = new HashMap<LogicalVariable, Integer>();
+        varList = new ArrayList<LogicalVariable>();
+    }
+
+    @Override
+    public void addAllVariables(IOperatorSchema source) {
+        for (LogicalVariable v : source) {
+            varMap.put(v, varList.size());
+            varList.add(v);
+        }
+    }
+
+    @Override
+    public void addAllNewVariables(IOperatorSchema source) {
+        for (LogicalVariable v : source) {
+            if (varMap.get(v) == null) {
+                varMap.put(v, varList.size());
+                varList.add(v);
+            }
+        }
+    }
+
+    @Override
+    public int addVariable(LogicalVariable var) {
+        int idx = varList.size();
+        varMap.put(var, idx);
+        varList.add(var);
+        return idx;
+    }
+
+    @Override
+    public void clear() {
+        varMap.clear();
+        varList.clear();
+    }
+
+    @Override
+    public int findVariable(LogicalVariable var) {
+        Integer i = varMap.get(var);
+        if (i == null) {
+            return -1;
+        }
+        return i;
+    }
+
+    @Override
+    public int getSize() {
+        return varList.size();
+    }
+
+    @Override
+    public LogicalVariable getVariable(int index) {
+        return varList.get(index);
+    }
+
+    @Override
+    public Iterator<LogicalVariable> iterator() {
+        return varList.iterator();
+    }
+
+    @Override
+    public String toString() {
+        return varMap.toString();
+    }
+
+}
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveScanRuntimeGenerator.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveScanRuntimeGenerator.java
new file mode 100644
index 0000000..5e4e21e
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveScanRuntimeGenerator.java
@@ -0,0 +1,116 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+
+import edu.uci.ics.hivesterix.common.config.ConfUtil;
+import edu.uci.ics.hivesterix.runtime.operator.filescan.HiveKeyValueParserFactory;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.api.topology.ClusterTopology;
+import edu.uci.ics.hyracks.hdfs.dataflow.HDFSReadOperatorDescriptor;
+import edu.uci.ics.hyracks.hdfs.scheduler.Scheduler;
+
+@SuppressWarnings({ "rawtypes", "deprecation" })
+public class HiveScanRuntimeGenerator {
+
+    private PartitionDesc fileDesc;
+
+    private transient Path filePath;
+
+    private String filePathName;
+
+    private Properties properties;
+
+    public HiveScanRuntimeGenerator(PartitionDesc path) {
+        fileDesc = path;
+        properties = fileDesc.getProperties();
+
+        String inputPath = properties.getProperty("location");
+
+        if (inputPath.startsWith("file:")) {
+            // Windows
+            String[] strs = inputPath.split(":");
+            filePathName = strs[strs.length - 1];
+        } else {
+            // Linux
+            filePathName = inputPath;
+        }
+
+        filePath = new Path(filePathName);
+    }
+
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getRuntimeOperatorAndConstraint(
+            IDataSource dataSource, List<LogicalVariable> scanVariables, List<LogicalVariable> projectVariables,
+            boolean projectPushed, JobGenContext context, JobSpecification jobSpec) throws AlgebricksException {
+        try {
+            // build the schema that is propagated to the downstream operators
+            IOperatorSchema propagatedSchema = new HiveOperatorSchema();
+
+            List<LogicalVariable> outputVariables = projectPushed ? projectVariables : scanVariables;
+            for (LogicalVariable var : outputVariables)
+                propagatedSchema.addVariable(var);
+
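+            // map each scan variable to its position among the projected outputs, or -1 if it is projected away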
+            int[] outputColumnsOffset = new int[scanVariables.size()];

+            int i = 0;

+            for (LogicalVariable var : scanVariables)

+                if (outputVariables.contains(var)) {

+                    int offset = outputVariables.indexOf(var);

+                    outputColumnsOffset[i++] = offset;

+                } else

+                    outputColumnsOffset[i++] = -1;

+

+            Object[] schemaTypes = dataSource.getSchemaTypes();

+            // get record descriptor

+            RecordDescriptor recDescriptor = mkRecordDescriptor(propagatedSchema, schemaTypes, context);

+

+            // setup the run time operator and constraints

+            JobConf conf = ConfUtil.getJobConf(fileDesc.getInputFileFormatClass(), filePath);

+            String[] locConstraints = ConfUtil.getNCs();

+            Map<String, NodeControllerInfo> ncNameToNcInfos = ConfUtil.getNodeControllerInfo();

+            ClusterTopology topology = ConfUtil.getClusterTopology();

+            Scheduler scheduler = new Scheduler(ncNameToNcInfos, topology);

+            InputSplit[] splits = conf.getInputFormat().getSplits(conf, locConstraints.length);

+            String[] schedule = scheduler.getLocationConstraints(splits);

+            IOperatorDescriptor scanner = new HDFSReadOperatorDescriptor(jobSpec, recDescriptor, conf, splits,

+                    schedule, new HiveKeyValueParserFactory(fileDesc, conf, outputColumnsOffset));

+

+            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(scanner,

+                    new AlgebricksAbsolutePartitionConstraint(locConstraints));

+        } catch (Exception e) {

+            throw new AlgebricksException(e);

+        }

+    }

+

+    private static RecordDescriptor mkRecordDescriptor(IOperatorSchema opSchema, Object[] types, JobGenContext context)

+            throws AlgebricksException {

+        ISerializerDeserializer[] fields = new ISerializerDeserializer[opSchema.getSize()];

+        ISerializerDeserializerProvider sdp = context.getSerializerDeserializerProvider();

+        int size = opSchema.getSize();

+        for (int i = 0; i < size; i++) {

+            Object t = types[i];

+            fields[i] = sdp.getSerializerDeserializer(t);

+            i++;

+        }

+        return new RecordDescriptor(fields);

+    }

+

+}
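For readers tracing the projection push-down above: outputColumnsOffset maps each scanned column to its slot in the projected output, with -1 marking pruned columns. A minimal, self-contained sketch of the same mapping, with plain strings standing in for LogicalVariable:

```java
import java.util.Arrays;
import java.util.List;

// Illustrative only: mirrors how getRuntimeOperatorAndConstraint builds
// outputColumnsOffset; strings stand in for LogicalVariable.
public class ProjectionOffsetDemo {
    static int[] buildOffsets(List<String> scanVars, List<String> outputVars) {
        int[] offsets = new int[scanVars.size()];
        int i = 0;
        for (String var : scanVars) {
            // keep the output slot index, or -1 if projection pruned the column
            offsets[i++] = outputVars.contains(var) ? outputVars.indexOf(var) : -1;
        }
        return offsets;
    }

    public static void main(String[] args) {
        List<String> scanned = Arrays.asList("a", "b", "c", "d");
        List<String> projected = Arrays.asList("c", "a");
        // prints [1, -1, 0, -1]: "a" lands in output slot 1, "c" in slot 0
        System.out.println(Arrays.toString(buildOffsets(scanned, projected)));
    }
}
```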

diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveWriteRuntimeGenerator.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveWriteRuntimeGenerator.java
new file mode 100644
index 0000000..7a577e8
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/HiveWriteRuntimeGenerator.java
@@ -0,0 +1,37 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;
+
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
+import org.apache.hadoop.mapred.JobConf;
+
+import edu.uci.ics.hivesterix.common.config.ConfUtil;
+import edu.uci.ics.hivesterix.runtime.operator.filewrite.HivePushRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+
+@SuppressWarnings("deprecation")
+public class HiveWriteRuntimeGenerator {
+    private FileSinkOperator fileSink;
+
+    private Schema inputSchema;
+
+    public HiveWriteRuntimeGenerator(FileSinkOperator fsOp, Schema oi) {
+        fileSink = fsOp;
+        inputSchema = oi;
+    }
+
+    /**
+     * Get the write runtime.
+     * 
+     * @param inputDesc the record descriptor of the input frames
+     * @return the write runtime factory and its partition constraint (null: unconstrained)
+     */
+    public Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriterRuntime(RecordDescriptor inputDesc) {
+        JobConf conf = ConfUtil.getJobConf();
+        IPushRuntimeFactory factory = new HivePushRuntimeFactory(inputDesc, conf, fileSink, inputSchema);
+        Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> pair = new Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint>(
+                factory, null);
+        return pair;
+    }
+}

diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/Schema.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/Schema.java
new file mode 100644
index 0000000..927c709
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/jobgen/Schema.java
@@ -0,0 +1,39 @@
+package edu.uci.ics.hivesterix.runtime.jobgen;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+
+public class Schema implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    private List<String> fieldNames;
+
+    private List<TypeInfo> fieldTypes;
+
+    public Schema(List<String> fieldNames, List<TypeInfo> fieldTypes) {
+        this.fieldNames = fieldNames;
+        this.fieldTypes = fieldTypes;
+    }
+
+    public ObjectInspector toObjectInspector() {
+        return LazyUtils.getLazyObjectInspector(fieldNames, fieldTypes);
+    }
+
+    public List<String> getNames() {
+        return fieldNames;
+    }
+
+    public List<TypeInfo> getTypes() {
+        return fieldTypes;
+    }
+
+    public Object[] getSchema() {
+        return fieldTypes.toArray();
+    }
+}

diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveKeyValueParser.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveKeyValueParser.java
new file mode 100644
index 0000000..472994a
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveKeyValueParser.java
@@ -0,0 +1,209 @@
+package edu.uci.ics.hivesterix.runtime.operator.filescan;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import edu.uci.ics.hivesterix.serde.parser.IHiveParser;
+import edu.uci.ics.hivesterix.serde.parser.TextToBinaryTupleParser;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.hdfs.api.IKeyValueParser;
+
+@SuppressWarnings("deprecation")
+public class HiveKeyValueParser<K, V> implements IKeyValueParser<K, V> {
+    /**
+     * the columns to output: projection is pushed into this scan
+     */
+    private int[] outputColumnsOffset;
+
+    /**
+     * serialization/de-serialization object
+     */
+    private SerDe serDe;
+
+    /**
+     * the input row object inspector
+     */
+    private StructObjectInspector structInspector;
+
+    /**
+     * the hadoop job conf
+     */
+    private JobConf job;
+
+    /**
+     * Hyracks context to control resource allocation
+     */
+    private final IHyracksTaskContext ctx;
+
+    /**
+     * lazy serde: the format of data flowing between operators
+     */
+    private final SerDe outputSerDe;
+
+    /**
+     * the parser from hive data to binary data
+     */
+    private IHiveParser parser;
+
+    /**
+     * the buffer for buffering output data
+     */
+    private ByteBuffer buffer;
+
+    /**
+     * the frame tuple appender
+     */
+    private FrameTupleAppender appender;
+
+    /**
+     * the array tuple builder
+     */
+    private ArrayTupleBuilder tb;
+
+    /**
+     * the field references of all fields
+     */
+    private List<? extends StructField> fieldRefs;
+
+    /**
+     * output fields
+     */
+    private Object[] outputFields;
+
+    /**
+     * output field references
+     */
+    private StructField[] outputFieldRefs;
+
+    public HiveKeyValueParser(String serDeClass, String outputSerDeClass, Properties tbl, JobConf conf,
+            final IHyracksTaskContext ctx, int[] outputColumnsOffset) throws HyracksDataException {
+        try {
+            job = conf;
+            // initialize the input serde
+            serDe = (SerDe) ReflectionUtils.newInstance(Class.forName(serDeClass), job);
+            serDe.initialize(job, tbl);
+            // initialize the output serde
+            outputSerDe = (SerDe) ReflectionUtils.newInstance(Class.forName(outputSerDeClass), job);
+            outputSerDe.initialize(job, tbl);
+            // object inspector of the row
+            structInspector = (StructObjectInspector) serDe.getObjectInspector();
+            // hyracks context
+            this.ctx = ctx;
+            this.outputColumnsOffset = outputColumnsOffset;
+
+            if (structInspector instanceof LazySimpleStructObjectInspector) {
+                LazySimpleStructObjectInspector rowInspector = (LazySimpleStructObjectInspector) structInspector;
+                List<? extends StructField> fieldRefs = rowInspector.getAllStructFieldRefs();
+                boolean lightWeightParsable = true;
+                for (StructField fieldRef : fieldRefs) {
+                    Category category = fieldRef.getFieldObjectInspector().getCategory();
+                    if (!(category == Category.PRIMITIVE)) {
+                        lightWeightParsable = false;
+                        break;
+                    }
+                }
+                if (lightWeightParsable) {
+                    parser = new TextToBinaryTupleParser(this.outputColumnsOffset, structInspector);
+                }
+            }
+
+            fieldRefs = structInspector.getAllStructFieldRefs();
+            int size = 0;
+            for (int i = 0; i < outputColumnsOffset.length; i++) {
+                if (outputColumnsOffset[i] >= 0) {
+                    size++;
+                }
+            }
+
+            tb = new ArrayTupleBuilder(size);
+            outputFieldRefs = new StructField[size];
+            outputFields = new Object[size];
+            for (int i = 0; i < outputColumnsOffset.length; i++)
+                if (outputColumnsOffset[i] >= 0)
+                    outputFieldRefs[outputColumnsOffset[i]] = fieldRefs.get(i);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void open(IFrameWriter writer) throws HyracksDataException {
+        buffer = ctx.allocateFrame();
+        appender = new FrameTupleAppender(ctx.getFrameSize());
+        appender.reset(buffer, true);
+    }
+
+    @Override
+    public void parse(K key, V value, IFrameWriter writer) throws HyracksDataException {
+        try {
+            tb.reset();
+            if (parser != null) {
+                Text text = (Text) value;
+                parser.parse(text.getBytes(), 0, text.getLength(), tb);
+            } else {
+                Object row = serDe.deserialize((Writable) value);
+                /**
+                 * write fields to the tuple builder one by one
+                 */
+                int i = 0;
+                for (StructField fieldRef : fieldRefs) {
+                    if (outputColumnsOffset[i] >= 0)
+                        outputFields[outputColumnsOffset[i]] = structInspector.getStructFieldData(row, fieldRef);
+                    i++;
+                }
+                i = 0;
+                DataOutput dos = tb.getDataOutput();
+                for (Object field : outputFields) {
+                    BytesWritable fieldWritable = (BytesWritable) outputSerDe.serialize(field,
+                            outputFieldRefs[i].getFieldObjectInspector());
+                    dos.write(fieldWritable.getBytes(), 0, fieldWritable.getSize());
+                    tb.addFieldEndOffset();
+                    i++;
+                }
+            }
+
+            /**
+             * append the tuple and flush it if necessary.
+             */
+            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                FrameUtils.flushFrame(buffer, writer);
+                appender.reset(buffer, true);
+                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                    throw new IllegalStateException();
+                }
+            }
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void close(IFrameWriter writer) throws HyracksDataException {
+        /**
+         * flush the residual tuples
+         */
+        if (appender.getTupleCount() > 0) {
+            FrameUtils.flushFrame(buffer, writer);
+        }
+        System.gc();
+    }
+
+}
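The append-flush-retry sequence in parse() above is the standard Hyracks frame-packing idiom: try to append the tuple, and if the current frame is full, flush it, reset, and retry on the fresh frame; a second failure means a single tuple exceeds the frame size. A minimal runnable sketch of that control flow, with a byte-budget list standing in for the real frame and FrameTupleAppender:

```java
import java.util.ArrayList;
import java.util.List;

// Illustrative sketch of the append-flush-retry idiom used in parse();
// a byte-budget list stands in for the real Hyracks frame and appender.
public class FramePackingDemo {
    static final int FRAME_BUDGET = 8; // bytes per "frame", tiny for the demo

    static List<List<byte[]>> frames = new ArrayList<List<byte[]>>();
    static List<byte[]> current = new ArrayList<byte[]>();
    static int used = 0;

    static boolean append(byte[] tuple) {
        if (used + tuple.length > FRAME_BUDGET) {
            return false; // frame full, caller must flush and retry
        }
        current.add(tuple);
        used += tuple.length;
        return true;
    }

    static void flushAndReset() {
        frames.add(current);
        current = new ArrayList<byte[]>();
        used = 0;
    }

    static void add(byte[] tuple) {
        if (!append(tuple)) {
            flushAndReset();
            if (!append(tuple)) {
                // a second failure means the tuple exceeds the frame size
                throw new IllegalStateException("tuple larger than frame");
            }
        }
    }

    public static void main(String[] args) {
        add(new byte[5]);
        add(new byte[5]); // does not fit: the first frame is flushed
        flushAndReset();
        System.out.println("frames written: " + frames.size()); // 2
    }
}
```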
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveKeyValueParserFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveKeyValueParserFactory.java
new file mode 100644
index 0000000..05903b9
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filescan/HiveKeyValueParserFactory.java
@@ -0,0 +1,39 @@
+package edu.uci.ics.hivesterix.runtime.operator.filescan;
+
+import java.util.Properties;
+
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.mapred.JobConf;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazySerDe;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.hdfs.api.IKeyValueParser;
+import edu.uci.ics.hyracks.hdfs.api.IKeyValueParserFactory;
+import edu.uci.ics.hyracks.hdfs.dataflow.ConfFactory;
+
+@SuppressWarnings("deprecation")
+public class HiveKeyValueParserFactory<K, V> implements IKeyValueParserFactory<K, V> {
+    private static final long serialVersionUID = 1L;
+    private final String serDeClass;
+    private final String outputSerDeClass = LazySerDe.class.getName();
+    private final Properties tbl;
+    private final ConfFactory confFactory;
+    private final int[] outputColumnsOffset;
+
+    public HiveKeyValueParserFactory(PartitionDesc desc, JobConf conf, int[] outputColumnsOffset)
+            throws HyracksDataException {
+        this.tbl = desc.getProperties();
+        this.serDeClass = (String) tbl.getProperty("serialization.lib");
+        this.outputColumnsOffset = outputColumnsOffset;
+        this.confFactory = new ConfFactory(conf);
+    }
+
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    public IKeyValueParser<K, V> createKeyValueParser(IHyracksTaskContext ctx) throws HyracksDataException {
+        return new HiveKeyValueParser(serDeClass, outputSerDeClass, tbl, confFactory.getConf(), ctx,
+                outputColumnsOffset);
+    }
+
+}
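HiveKeyValueParserFactory holds a ConfFactory rather than the JobConf itself, since the factory object is serialized and shipped to the node controllers while Hadoop's JobConf is not Java-serializable. A minimal sketch of that wrapper pattern, assuming the configuration round-trips through its XML form (SerializableConf is a hypothetical name, not the actual Hyracks ConfFactory API):

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.Serializable;

import org.apache.hadoop.mapred.JobConf;

// Hypothetical sketch: ship a JobConf across the wire by storing its XML form.
@SuppressWarnings("deprecation")
public class SerializableConf implements Serializable {
    private static final long serialVersionUID = 1L;
    private final byte[] confXml;

    public SerializableConf(JobConf conf) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        conf.writeXml(bos); // a Configuration can emit itself as XML
        confXml = bos.toByteArray();
    }

    public JobConf getConf() {
        JobConf conf = new JobConf(false);
        conf.addResource(new ByteArrayInputStream(confXml)); // and re-read it
        return conf;
    }
}
```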
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HiveFileWritePushRuntime.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HiveFileWritePushRuntime.java
new file mode 100644
index 0000000..81faf38
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HiveFileWritePushRuntime.java
@@ -0,0 +1,153 @@
+package edu.uci.ics.hivesterix.runtime.operator.filewrite;
+
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
+import org.apache.hadoop.hive.ql.exec.OperatorFactory;
+import org.apache.hadoop.hive.ql.exec.RowSchema;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.mapred.JobConf;
+
+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;
+import edu.uci.ics.hivesterix.serde.lazy.LazyColumnar;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyColumnarObjectInspector;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntime;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+
+@SuppressWarnings("deprecation")
+public class HiveFileWritePushRuntime implements IPushRuntime {
+
+	/**
+	 * frame tuple accessor to access byte buffer
+	 */
+	private final FrameTupleAccessor accessor;
+
+	/**
+	 * input object inspector
+	 */
+	private final ObjectInspector inputInspector;
+
+	/**
+	 * reusable lazy-columnar row wrapping the current input tuple
+	 */
+	private final LazyColumnar cachedInput;
+
+	/**
+	 * file sink descriptor of Hive
+	 */
+	private final FileSinkDesc fileSink;
+
+	/**
+	 * job configuration, which contains the name node and other configuration
+	 * information
+	 */
+	private JobConf conf;
+
+	/**
+	 * input schema
+	 */
+	private final Schema inputSchema;
+
+	/**
+	 * a copy of the Hive schema representation
+	 */
+	private RowSchema rowSchema;
+
+	/**
+	 * the Hive file sink operator
+	 */
+	private FileSinkOperator fsOp;
+
+	/**
+	 * cached tuple object reference
+	 */
+	private FrameTupleReference tuple = new FrameTupleReference();
+
+	/**
+	 * @param context
+	 * @param inputRecordDesc
+	 * @param job
+	 * @param fs
+	 * @param schema
+	 * @param oi
+	 */
+	public HiveFileWritePushRuntime(IHyracksTaskContext context,
+			RecordDescriptor inputRecordDesc, JobConf job, FileSinkDesc fs,
+			RowSchema schema, Schema oi) {
+		fileSink = fs;
+		fileSink.setGatherStats(false);
+
+		rowSchema = schema;
+		conf = job;
+		inputSchema = oi;
+
+		accessor = new FrameTupleAccessor(context.getFrameSize(),
+				inputRecordDesc);
+		inputInspector = inputSchema.toObjectInspector();
+		cachedInput = new LazyColumnar(
+				(LazyColumnarObjectInspector) inputInspector);
+	}
+
+	@Override
+	public void open() throws HyracksDataException {
+		fsOp = (FileSinkOperator) OperatorFactory.get(fileSink, rowSchema);
+		fsOp.setChildOperators(null);
+		fsOp.setParentOperators(null);
+		conf.setClassLoader(this.getClass().getClassLoader());
+
+		ObjectInspector[] inspectors = new ObjectInspector[1];
+		inspectors[0] = inputInspector;
+		try {
+			fsOp.initialize(conf, inspectors);
+			fsOp.setExecContext(null);
+		} catch (Exception e) {
+			// fail the task instead of silently swallowing the error
+			throw new HyracksDataException(e);
+		}
+	}
+
+	@Override
+	public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+		accessor.reset(buffer);
+		int n = accessor.getTupleCount();
+		try {
+			for (int i = 0; i < n; ++i) {
+				tuple.reset(accessor, i);
+				cachedInput.init(tuple);
+				fsOp.process(cachedInput, 0);
+			}
+		} catch (HiveException e) {
+			throw new HyracksDataException(e);
+		}
+	}
+
+	@Override
+	public void close() throws HyracksDataException {
+		try {
+			Thread.currentThread().setContextClassLoader(
+					this.getClass().getClassLoader());
+			fsOp.closeOp(false);
+		} catch (HiveException e) {
+			throw new HyracksDataException(e);
+		}
+	}
+
+	@Override
+	public void setFrameWriter(int index, IFrameWriter writer,
+			RecordDescriptor recordDesc) {
+		throw new IllegalStateException();
+	}
+
+	@Override
+	public void setInputRecordDescriptor(int index,
+			RecordDescriptor recordDescriptor) {
+	}
+
+	@Override
+	public void fail() throws HyracksDataException {
+
+	}
+
+}

diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HivePushRuntimeFactory.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HivePushRuntimeFactory.java
new file mode 100644
index 0000000..6c18231
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/operator/filewrite/HivePushRuntimeFactory.java
@@ -0,0 +1,105 @@
+package edu.uci.ics.hivesterix.runtime.operator.filewrite;
+
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+import java.util.UUID;
+
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
+import org.apache.hadoop.hive.ql.exec.RowSchema;
+import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
+import org.apache.hadoop.mapred.JobConf;
+
+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntime;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+
+@SuppressWarnings("deprecation")
+public class HivePushRuntimeFactory implements IPushRuntimeFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    private final RecordDescriptor inputRecordDesc;
+    private transient JobConf conf;
+    private final FileSinkDesc fileSink;
+    private final RowSchema outSchema;
+    private final Schema schema;
+
+    /**
+     * the XML content of the job configuration; JobConf is not serializable,
+     * so it is shipped as a string and rebuilt on the receiving side
+     */
+    private String confContent;
+
+    public HivePushRuntimeFactory(RecordDescriptor inputRecordDesc, JobConf conf, FileSinkOperator fsp, Schema sch) {
+        this.inputRecordDesc = inputRecordDesc;
+        this.conf = conf;
+        this.fileSink = fsp.getConf();
+        outSchema = fsp.getSchema();
+        this.schema = sch;
+
+        writeConfContent();
+    }
+
+    @Override
+    public String toString() {
+        return "file write";
+    }
+
+    @Override
+    public IPushRuntime createPushRuntime(IHyracksTaskContext context) throws AlgebricksException {
+        if (conf == null)
+            readConfContent();
+
+        return new HiveFileWritePushRuntime(context, inputRecordDesc, conf, fileSink, outSchema, schema);
+    }
+
+    private void readConfContent() {
+        File dir = new File("hadoop-conf-tmp");
+        if (!dir.exists()) {
+            dir.mkdir();
+        }
+
+        String fileName = "hadoop-conf-tmp/" + UUID.randomUUID() + System.currentTimeMillis() + ".xml";
+        try {
+            PrintWriter out = new PrintWriter((new OutputStreamWriter(new FileOutputStream(new File(fileName)))));
+            out.write(confContent);
+            out.close();
+            conf = new JobConf(fileName);
+        } catch (Exception e) {
+            // a missing conf would only fail later with an NPE; fail fast instead
+            throw new IllegalStateException(e);
+        }
+    }
+
+    private void writeConfContent() {
+        File dir = new File("hadoop-conf-tmp");
+        if (!dir.exists()) {
+            dir.mkdir();
+        }
+
+        String fileName = "hadoop-conf-tmp/" + UUID.randomUUID() + System.currentTimeMillis() + ".xml";
+        try {
+            DataOutputStream out = new DataOutputStream(new FileOutputStream(new File(fileName)));
+            conf.writeXml(out);
+            out.close();
+
+            DataInputStream in = new DataInputStream(new FileInputStream(fileName));
+            StringBuffer buffer = new StringBuffer();
+            String line;
+            while ((line = in.readLine()) != null) {
+                buffer.append(line + "\n");
+            }
+            in.close();
+            confContent = buffer.toString();
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+}

diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryComparatorFactoryProvider.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryComparatorFactoryProvider.java
new file mode 100644
index 0000000..467ec0a
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryComparatorFactoryProvider.java
@@ -0,0 +1,75 @@
+package edu.uci.ics.hivesterix.runtime.provider;
+
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveByteBinaryAscComparatorFactory;
+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveByteBinaryDescComparatorFactory;
+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveDoubleBinaryAscComparatorFactory;
+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveDoubleBinaryDescComparatorFactory;
+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveFloatBinaryAscComparatorFactory;
+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveFloatBinaryDescComparatorFactory;
+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveIntegerBinaryAscComparatorFactory;
+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveIntegerBinaryDescComparatorFactory;
+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveLongBinaryAscComparatorFactory;
+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveLongBinaryDescComparatorFactory;
+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveShortBinaryAscComparatorFactory;
+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveShortBinaryDescComparatorFactory;
+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveStringBinaryAscComparatorFactory;
+import edu.uci.ics.hivesterix.runtime.factory.comparator.HiveStringBinaryDescComparatorFactory;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class HiveBinaryComparatorFactoryProvider implements IBinaryComparatorFactoryProvider {
+
+    public static final HiveBinaryComparatorFactoryProvider INSTANCE = new HiveBinaryComparatorFactoryProvider();
+
+    private HiveBinaryComparatorFactoryProvider() {
+    }
+
+    @Override
+    public IBinaryComparatorFactory getBinaryComparatorFactory(Object type, boolean ascending)
+            throws AlgebricksException {
+        if (type.equals(TypeInfoFactory.intTypeInfo)) {
+            if (ascending)
+                return HiveIntegerBinaryAscComparatorFactory.INSTANCE;
+            else
+                return HiveIntegerBinaryDescComparatorFactory.INSTANCE;
+
+        } else if (type.equals(TypeInfoFactory.longTypeInfo)) {
+            if (ascending)
+                return HiveLongBinaryAscComparatorFactory.INSTANCE;
+            else
+                return HiveLongBinaryDescComparatorFactory.INSTANCE;
+
+        } else if (type.equals(TypeInfoFactory.floatTypeInfo)) {
+            if (ascending)
+                return HiveFloatBinaryAscComparatorFactory.INSTANCE;
+            else
+                return HiveFloatBinaryDescComparatorFactory.INSTANCE;
+
+        } else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {
+            if (ascending)
+                return HiveDoubleBinaryAscComparatorFactory.INSTANCE;
+            else
+                return HiveDoubleBinaryDescComparatorFactory.INSTANCE;
+        } else if (type.equals(TypeInfoFactory.shortTypeInfo)) {
+            if (ascending)
+                return HiveShortBinaryAscComparatorFactory.INSTANCE;
+            else
+                return HiveShortBinaryDescComparatorFactory.INSTANCE;
+        } else if (type.equals(TypeInfoFactory.stringTypeInfo)) {
+            if (ascending)
+                return HiveStringBinaryAscComparatorFactory.INSTANCE;
+            else
+                return HiveStringBinaryDescComparatorFactory.INSTANCE;
+        } else if (type.equals(TypeInfoFactory.byteTypeInfo) || type.equals(TypeInfoFactory.booleanTypeInfo)) {
+            if (ascending)
+                return HiveByteBinaryAscComparatorFactory.INSTANCE;
+            else
+                return HiveByteBinaryDescComparatorFactory.INSTANCE;
+        } else
+            throw new NotImplementedException();
+    }
+}

diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFactoryProvider.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFactoryProvider.java
new file mode 100644
index 0000000..473eee1
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFactoryProvider.java
@@ -0,0 +1,35 @@
+package edu.uci.ics.hivesterix.runtime.provider;
+
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+import edu.uci.ics.hivesterix.runtime.factory.hashfunction.HiveDoubleBinaryHashFunctionFactory;
+import edu.uci.ics.hivesterix.runtime.factory.hashfunction.HiveIntegerBinaryHashFunctionFactory;
+import edu.uci.ics.hivesterix.runtime.factory.hashfunction.HiveLongBinaryHashFunctionFactory;
+import edu.uci.ics.hivesterix.runtime.factory.hashfunction.HiveRawBinaryHashFunctionFactory;
+import edu.uci.ics.hivesterix.runtime.factory.hashfunction.HiveStingBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+
+public class HiveBinaryHashFunctionFactoryProvider implements IBinaryHashFunctionFactoryProvider {
+
+    public static final HiveBinaryHashFunctionFactoryProvider INSTANCE = new HiveBinaryHashFunctionFactoryProvider();
+
+    private HiveBinaryHashFunctionFactoryProvider() {
+    }
+
+    @Override
+    public IBinaryHashFunctionFactory getBinaryHashFunctionFactory(Object type) throws AlgebricksException {
+        if (type.equals(TypeInfoFactory.intTypeInfo)) {
+            return HiveIntegerBinaryHashFunctionFactory.INSTANCE;
+        } else if (type.equals(TypeInfoFactory.longTypeInfo)) {
+            return HiveLongBinaryHashFunctionFactory.INSTANCE;
+        } else if (type.equals(TypeInfoFactory.stringTypeInfo)) {
+            return HiveStingBinaryHashFunctionFactory.INSTANCE;
+        } else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {
+            return HiveDoubleBinaryHashFunctionFactory.INSTANCE;
+        } else {
+            return HiveRawBinaryHashFunctionFactory.INSTANCE;
+        }
+    }
+}

diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFamilyProvider.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFamilyProvider.java
new file mode 100644
index 0000000..e7a2e79
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFamilyProvider.java
@@ -0,0 +1,20 @@
+package edu.uci.ics.hivesterix.runtime.provider;
+
+import edu.uci.ics.hivesterix.runtime.factory.hashfunction.MurmurHash3BinaryHashFunctionFamily;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+
+public class HiveBinaryHashFunctionFamilyProvider implements IBinaryHashFunctionFamilyProvider {
+
+    public static HiveBinaryHashFunctionFamilyProvider INSTANCE = new HiveBinaryHashFunctionFamilyProvider();
+
+    private HiveBinaryHashFunctionFamilyProvider() {
+
+    }
+
+    @Override
+    public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type) throws AlgebricksException {
+        return MurmurHash3BinaryHashFunctionFamily.INSTANCE;
+    }
+}
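Unlike a plain hash-function factory, a family can mint a different hash function per seed; recursive re-partitioning operators (hybrid hash joins, for instance) rely on this so that each recursion level partitions with an independent function, and mapping every type to the MurmurHash3 family, as above, suffices for that purpose. A hedged sketch of the concept with an illustrative FNV-1a-style family (the interfaces here are stand-ins, not the Hyracks IBinaryHashFunctionFamily API):

```java
// Illustrative stand-ins for the Hyracks hash-family interfaces.
interface SimpleHash {
    int hash(byte[] bytes, int offset, int length);
}

interface SimpleHashFamily {
    SimpleHash createHash(int seed);
}

// An FNV-1a-style family: each seed yields an independent hash function,
// so successive recursion levels do not repeat the same partitioning.
final class FnvFamily implements SimpleHashFamily {
    @Override
    public SimpleHash createHash(final int seed) {
        return new SimpleHash() {
            @Override
            public int hash(byte[] bytes, int offset, int length) {
                int h = 0x811c9dc5 ^ seed; // fold the seed into the FNV offset basis
                for (int i = 0; i < length; i++) {
                    h = (h ^ bytes[offset + i]) * 0x01000193;
                }
                return h;
            }
        };
    }
}
```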
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveNormalizedKeyComputerFactoryProvider.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveNormalizedKeyComputerFactoryProvider.java
new file mode 100644
index 0000000..91bf3e5
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveNormalizedKeyComputerFactoryProvider.java
@@ -0,0 +1,51 @@
+package edu.uci.ics.hivesterix.runtime.provider;
+
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveDoubleAscNormalizedKeyComputerFactory;
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveDoubleDescNormalizedKeyComputerFactory;
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveIntegerAscNormalizedKeyComputerFactory;
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveIntegerDescNormalizedKeyComputerFactory;
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveLongAscNormalizedKeyComputerFactory;
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveLongDescNormalizedKeyComputerFactory;
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveStringAscNormalizedKeyComputerFactory;
+import edu.uci.ics.hivesterix.runtime.factory.normalize.HiveStringDescNormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+public class HiveNormalizedKeyComputerFactoryProvider implements INormalizedKeyComputerFactoryProvider {
+
+    public static final HiveNormalizedKeyComputerFactoryProvider INSTANCE = new HiveNormalizedKeyComputerFactoryProvider();
+
+    private HiveNormalizedKeyComputerFactoryProvider() {
+    }
+
+    @Override
+    public INormalizedKeyComputerFactory getNormalizedKeyComputerFactory(Object type, boolean ascending) {
+        if (ascending) {
+            if (type.equals(TypeInfoFactory.stringTypeInfo)) {
+                return new HiveStringAscNormalizedKeyComputerFactory();
+            } else if (type.equals(TypeInfoFactory.intTypeInfo)) {
+                return new HiveIntegerAscNormalizedKeyComputerFactory();
+            } else if (type.equals(TypeInfoFactory.longTypeInfo)) {
+                return new HiveLongAscNormalizedKeyComputerFactory();
+            } else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {
+                return new HiveDoubleAscNormalizedKeyComputerFactory();
+            } else {
+                return null;
+            }
+        } else {
+            if (type.equals(TypeInfoFactory.stringTypeInfo)) {
+                return new HiveStringDescNormalizedKeyComputerFactory();
+            } else if (type.equals(TypeInfoFactory.intTypeInfo)) {
+                return new HiveIntegerDescNormalizedKeyComputerFactory();
+            } else if (type.equals(TypeInfoFactory.longTypeInfo)) {
+                return new HiveLongDescNormalizedKeyComputerFactory();
+            } else if (type.equals(TypeInfoFactory.doubleTypeInfo)) {
+                return new HiveDoubleDescNormalizedKeyComputerFactory();
+            } else {
+                return null;
+            }
+        }
+    }
+}
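Normalized key computers let a sorter compare tuples by a single machine word before falling back to the full binary comparator; returning null, as above for unsupported types, simply disables the optimization. A hedged sketch of the idea for 32-bit integer keys, with an illustrative interface in place of Hyracks' INormalizedKeyComputer:

```java
// Illustrative: an order-preserving one-word prefix for int keys.
interface SimpleNormalizedKeyComputer {
    int normalize(byte[] bytes, int start, int length);
}

final class IntAscNormalizedKey implements SimpleNormalizedKeyComputer {
    @Override
    public int normalize(byte[] bytes, int start, int length) {
        // read the big-endian int stored at the key's offset
        int v = ((bytes[start] & 0xff) << 24) | ((bytes[start + 1] & 0xff) << 16)
                | ((bytes[start + 2] & 0xff) << 8) | (bytes[start + 3] & 0xff);
        // flipping the sign bit makes unsigned comparison of the prefixes
        // agree with signed integer order
        return v ^ 0x80000000;
    }
}
```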
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HivePrinterFactoryProvider.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HivePrinterFactoryProvider.java
new file mode 100644
index 0000000..10c84d2
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HivePrinterFactoryProvider.java
@@ -0,0 +1,16 @@
+package edu.uci.ics.hivesterix.runtime.provider;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactoryProvider;
+
+public class HivePrinterFactoryProvider implements IPrinterFactoryProvider {
+
+    public static IPrinterFactoryProvider INSTANCE = new HivePrinterFactoryProvider();
+
+    @Override
+    public IPrinterFactory getPrinterFactory(Object type) throws AlgebricksException {
+        return null;
+    }
+
+}

diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveSerializerDeserializerProvider.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveSerializerDeserializerProvider.java
new file mode 100644
index 0000000..22f81e0
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveSerializerDeserializerProvider.java
@@ -0,0 +1,21 @@
+package edu.uci.ics.hivesterix.runtime.provider;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+
+public class HiveSerializerDeserializerProvider implements ISerializerDeserializerProvider {
+
+    public static final HiveSerializerDeserializerProvider INSTANCE = new HiveSerializerDeserializerProvider();
+
+    private HiveSerializerDeserializerProvider() {
+    }
+
+    @SuppressWarnings("rawtypes")
+    @Override
+    public ISerializerDeserializer getSerializerDeserializer(Object type) throws AlgebricksException {
+        // return ARecordSerializerDeserializer.SCHEMALESS_INSTANCE;
+        return null;
+    }
+
+}

diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveTypeTraitProvider.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveTypeTraitProvider.java
new file mode 100644
index 0000000..be4b149
--- /dev/null
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveTypeTraitProvider.java
@@ -0,0 +1,33 @@
+package edu.uci.ics.hivesterix.runtime.provider;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+
+public class HiveTypeTraitProvider implements ITypeTraitProvider, Serializable {
+    private static final long serialVersionUID = 1L;
+    public static HiveTypeTraitProvider INSTANCE = new HiveTypeTraitProvider();
+
+    private HiveTypeTraitProvider() {
+
+    }
+
+    @Override
+    public ITypeTraits getTypeTrait(Object arg0) {
+        return new ITypeTraits() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public int getFixedLength() {
+                return -1;
+            }
+
+            @Override
+            public boolean isFixedLength() {
+                return false;
+            }
+
+        };
+    }
+}
diff --git a/hivesterix/hivesterix-serde/pom.xml b/hivesterix/hivesterix-serde/pom.xml
new file mode 100644
index 0000000..aa649cb
--- /dev/null
+++ b/hivesterix/hivesterix-serde/pom.xml
@@ -0,0 +1,67 @@
+<?xml version="1.0"?>
+<project
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+	xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<artifactId>hivesterix</artifactId>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+	<artifactId>hivesterix-serde</artifactId>
+	<name>hivesterix-serde</name>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+					<encoding>UTF-8</encoding>
+					<fork>true</fork>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<dependencies>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-serde</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-common</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>algebricks-compiler</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hivesterix-common</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>3.8.1</version>
+			<scope>test</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/ByteArrayRef.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/ByteArrayRef.java
new file mode 100644
index 0000000..92415f9
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/ByteArrayRef.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+/**
+ * ByteArrayRef stores a reference to a byte array.
+ * The LazyObject hierarchy uses a reference to a single ByteArrayRef, so that
+ * it's much faster to switch to the next row and release the reference to the
+ * old row (so that the system can do garbage collection if needed).
+ */
+public class ByteArrayRef {
+
+    /**
+     * Stores the actual data.
+     */
+    byte[] data;
+
+    public byte[] getData() {
+        return data;
+    }
+
+    public void setData(byte[] data) {
+        this.data = data;
+    }
+
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyArray.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyArray.java
new file mode 100644
index 0000000..33b20bf
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyArray.java
@@ -0,0 +1,229 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.RecordInfo;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyListObjectInspector;
+
+/**
+ * LazyArray is serialized as follows: start A b b b b b b end bytes[] ->
+ * |--------|---|---|---|---| ... |---|---|
+ * Section A is the null-bytes. Suppose the list has N elements, then there are
+ * (N+7)/8 bytes used as null-bytes. Each bit corresponds to an element and it
+ * indicates whether that element is null (0) or not null (1).
+ * After A, all b(s) represent the elements of the list. Each of them is again a
+ * LazyObject.
+ */
+
+public class LazyArray extends LazyNonPrimitive<LazyListObjectInspector> {
+
+    /**
+     * Whether the data is already parsed or not.
+     */
+    boolean parsed = false;
+    /**
+     * The length of the array. Only valid when the data is parsed.
+     */
+    int arraySize = 0;
+
+    /**
+     * The start positions and lengths of array elements. Only valid when the
+     * data is parsed.
+     */
+    int[] elementStart;
+    int[] elementLength;
+
+    /**
+     * Whether an element is initialized or not.
+     */
+    boolean[] elementInited;
+
+    /**
+     * Whether an element is null or not. A length of 0 does not mean the
+     * field is null; in particular, a 0-length string is not null.
+     */
+    boolean[] elementIsNull;
+
+    /**
+     * The elements of the array. Note that we call arrayElements[i].init(bytes,
+     * begin, length) only when that element is accessed.
+     */
+    @SuppressWarnings("rawtypes")
+    LazyObject[] arrayElements;
+
+    /**
+     * Construct a LazyArray object with the ObjectInspector.
+     * 
+     * @param oi
+     *            the oi representing the type of this LazyArray
+     */
+    protected LazyArray(LazyListObjectInspector oi) {
+        super(oi);
+    }
+
+    /**
+     * Set the row data for this LazyArray.
+     * 
+     * @see LazyObject#init(ByteArrayRef, int, int)
+     */
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        super.init(bytes, start, length);
+        parsed = false;
+    }
+
+    /**
+     * Enlarge the size of arrays storing information for the elements inside
+     * the array.
+     */
+    private void adjustArraySize(int newSize) {
+        if (elementStart == null || elementStart.length < newSize) {
+            elementStart = new int[newSize];
+            elementLength = new int[newSize];
+            elementInited = new boolean[newSize];
+            elementIsNull = new boolean[newSize];
+            arrayElements = new LazyObject[newSize];
+        }
+    }
+
+    VInt vInt = new LazyUtils.VInt();
+    RecordInfo recordInfo = new LazyUtils.RecordInfo();
+
+    /**
+     * Parse the bytes and fill elementStart, elementLength, elementInited and
+     * elementIsNull.
+     */
+    private void parse() {
+
+        // read the vint that encodes the array size
+        LazyUtils.readVInt(bytes, start, vInt);
+        arraySize = vInt.value;
+        if (0 == arraySize) {
+            parsed = true;
+            return;
+        }
+
+        // adjust arrays
+        adjustArraySize(arraySize);
+        // find out the null-bytes
+        int arryByteStart = start + vInt.length;
+        int nullByteCur = arryByteStart;
+        int nullByteEnd = arryByteStart + (arraySize + 7) / 8;
+        // the beginning of the real elements
+        int lastElementByteEnd = nullByteEnd;
+        // the list element object inspector
+        ObjectInspector listEleObjectInspector = ((ListObjectInspector) oi).getListElementObjectInspector();
+        // parsing elements one by one
+        for (int i = 0; i < arraySize; i++) {
+            elementIsNull[i] = true;
+            if ((bytes[nullByteCur] & (1 << (i % 8))) != 0) {
+                elementIsNull[i] = false;
+                LazyUtils.checkObjectByteInfo(listEleObjectInspector, bytes, lastElementByteEnd, recordInfo);
+                elementStart[i] = lastElementByteEnd + recordInfo.elementOffset;
+                elementLength[i] = recordInfo.elementSize;
+                lastElementByteEnd = elementStart[i] + elementLength[i];
+            }
+            // move onto the next null byte
+            if (7 == (i % 8)) {
+                nullByteCur++;
+            }
+        }
+
+        Arrays.fill(elementInited, 0, arraySize, false);
+        parsed = true;
+    }
+
+    /**
+     * Returns the actual primitive object at the index position inside the
+     * array represented by this LazyObject.
+     */
+    public Object getListElementObject(int index) {
+        if (!parsed) {
+            parse();
+        }
+        if (index < 0 || index >= arraySize) {
+            return null;
+        }
+        return uncheckedGetElement(index);
+    }
+
+    /**
+     * Get the element without checking out-of-bound index.
+     * 
+     * @param index
+     *            index to the array element
+     */
+    private Object uncheckedGetElement(int index) {
+
+        if (elementIsNull[index]) {
+            return null;
+        } else {
+            if (!elementInited[index]) {
+                elementInited[index] = true;
+                if (arrayElements[index] == null) {
+                    arrayElements[index] = LazyFactory.createLazyObject((oi).getListElementObjectInspector());
+                }
+                arrayElements[index].init(bytes, elementStart[index], elementLength[index]);
+            }
+        }
+        return arrayElements[index].getObject();
+    }
+
+    /**
+     * Returns the array size.
+     */
+    public int getListLength() {
+        if (!parsed) {
+            parse();
+        }
+        return arraySize;
+    }
+
+    /**
+     * cachedList is reused every time getList is called. Different
+     * LazyArray instances cannot share the same cachedList.
+     */
+    ArrayList<Object> cachedList;
+
+    /**
+     * Returns the List of actual primitive objects. Returns null for null
+     * array.
+     */
+    public List<Object> getList() {
+        if (!parsed) {
+            parse();
+        }
+        if (cachedList == null) {
+            cachedList = new ArrayList<Object>(arraySize);
+        } else {
+            cachedList.clear();
+        }
+        for (int index = 0; index < arraySize; index++) {
+            cachedList.add(uncheckedGetElement(index));
+        }
+        return cachedList;
+    }
+}
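To make the null-byte layout above concrete: for an array of N elements, the serialization stores a vint length, then (N + 7) / 8 null-bytes in which bit (i % 8) of byte (i / 8) is set when element i is present, then the non-null elements back to back. A small runnable sketch of the bit arithmetic that parse() performs:

```java
// Worked example of the null-byte addressing used in LazyArray.parse().
public class NullByteDemo {
    public static void main(String[] args) {
        int arraySize = 10;
        // (N + 7) / 8 null-bytes follow the vint size
        int nullBytes = (arraySize + 7) / 8; // -> 2
        System.out.println("null-bytes: " + nullBytes);

        // suppose elements 0, 3 and 9 are non-null
        byte[] bits = new byte[nullBytes];
        for (int i : new int[] { 0, 3, 9 }) {
            bits[i / 8] |= (1 << (i % 8));
        }
        for (int i = 0; i < arraySize; i++) {
            // same bit test as parse(), with i / 8 in place of the running cursor
            boolean present = (bits[i / 8] & (1 << (i % 8))) != 0;
            System.out.println("element " + i + (present ? " present" : " null"));
        }
    }
}
```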
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyBoolean.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyBoolean.java
new file mode 100644
index 0000000..5a48525
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyBoolean.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.io.BooleanWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyBooleanObjectInspector;
+
+/**
+ * LazyObject for storing a value of boolean.
+ * <p>
+ * Part of the code is adapted from Apache Harmony Project. As with the specification, this implementation relied on code laid out in <a href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's Delight, (Addison Wesley, 2002)</a> as well as <a href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
+ * </p>
+ */
+public class LazyBoolean extends LazyPrimitive<LazyBooleanObjectInspector, BooleanWritable> {
+
+    public LazyBoolean(LazyBooleanObjectInspector oi) {
+        super(oi);
+        data = new BooleanWritable();
+    }
+
+    public LazyBoolean(LazyBoolean copy) {
+        super(copy);
+        data = new BooleanWritable(copy.data.get());
+    }
+
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        } else
+            isNull = false;
+
+        // a temporary hack
+        assert (1 == length);
+        byte val = bytes[start];
+        if (val == 0) {
+            data.set(false);
+        } else if (val == 1) {
+            data.set(true);
+        }
+    }
+
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyByte.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyByte.java
new file mode 100644
index 0000000..bf4ff04
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyByte.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.io.ByteWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyByteObjectInspector;
+
+/**
+ * LazyObject for storing a value of Byte.
+ * <p>
+ * Part of the code is adapted from Apache Harmony Project. As with the specification, this implementation relied on code laid out in <a href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's Delight, (Addison Wesley, 2002)</a> as well as <a href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
+ * </p>
+ */
+public class LazyByte extends LazyPrimitive<LazyByteObjectInspector, ByteWritable> {
+
+    public LazyByte(LazyByteObjectInspector oi) {
+        super(oi);
+        data = new ByteWritable();
+    }
+
+    public LazyByte(LazyByte copy) {
+        super(copy);
+        data = new ByteWritable(copy.data.get());
+    }
+
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        } else
+            isNull = false;
+
+        assert (1 == length);
+        data.set(bytes[start]);
+    }
+
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyColumnar.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyColumnar.java
new file mode 100644
index 0000000..d73fea7
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyColumnar.java
@@ -0,0 +1,207 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyColumnarObjectInspector;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * LazyObject for storing a struct. The field of a struct can be primitive or
+ * non-primitive.
+ * LazyStruct does not deal with the case of a NULL struct. That is handled by
+ * the parent LazyObject.
+ */
+@SuppressWarnings("rawtypes")
+public class LazyColumnar extends LazyNonPrimitive<LazyColumnarObjectInspector> {
+
+    /**
+     * IFrameTupleReference: the backend of the struct
+     */
+    IFrameTupleReference tuple;
+
+    /**
+     * Whether the data is already parsed or not.
+     */
+    boolean reset;
+
+    /**
+     * The fields of the struct.
+     */
+    LazyObject[] fields;
+
+    /**
+     * Whether init() has been called on the field or not.
+     */
+    boolean[] fieldVisited;
+
+    /**
+     * Whether this is the first-time initialization.
+     */
+    boolean start = true;
+
+    /**
+     * Construct a LazyColumnar object with the ObjectInspector.
+     */
+    public LazyColumnar(LazyColumnarObjectInspector oi) {
+        super(oi);
+    }
+
+    /**
+     * Set the row data for this LazyColumnar.
+     * 
+     * @see LazyObject#init(byte[], int, int)
+     */
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        super.init(bytes, start, length);
+        reset = false;
+    }
+
+    /**
+     * Parse the byte[] and fill each field.
+     */
+    private void parse() {
+
+        if (start) {
+            // initialize field array and reusable objects
+            List<? extends StructField> fieldRefs = ((StructObjectInspector) oi).getAllStructFieldRefs();
+
+            fields = new LazyObject[fieldRefs.size()];
+            for (int i = 0; i < fields.length; i++) {
+                fields[i] = LazyFactory.createLazyObject(fieldRefs.get(i).getFieldObjectInspector());
+            }
+            fieldVisited = new boolean[fields.length];
+            start = false;
+        }
+
+        Arrays.fill(fieldVisited, false);
+        reset = true;
+    }
+
+    /**
+     * Get one field out of the struct.
+     * If the field is a primitive field, return the actual object. Otherwise
+     * return the LazyObject. This is because PrimitiveObjectInspector does not
+     * have control over the object used by the caller - the caller uses the
+     * Object directly instead of going through
+     * PrimitiveObjectInspector.get(Object).
+     * 
+     * @param fieldID
+     *            The field ID
+     * @return The field as a LazyObject
+     */
+    public Object getField(int fieldID) {
+        if (!reset) {
+            parse();
+        }
+        return uncheckedGetField(fieldID);
+    }
+
+    /**
+     * Get the field out of the row without checking parsed. This is called by
+     * both getField and getFieldsAsList.
+     * 
+     * @param fieldID
+     *            The id of the field starting from 0.
+     * @return The value of the field
+     */
+    private Object uncheckedGetField(int fieldID) {
+        // get the buffer
+        byte[] buffer = tuple.getFieldData(fieldID);
+        // get the offset of the field
+        int s1 = tuple.getFieldStart(fieldID);
+        int l1 = tuple.getFieldLength(fieldID);
+
+        if (!fieldVisited[fieldID]) {
+            fieldVisited[fieldID] = true;
+            fields[fieldID].init(buffer, s1, l1);
+        }
+        // if (fields[fieldID].getObject() == null) {
+        // throw new IllegalStateException("illegal field " + fieldID);
+        // }
+        return fields[fieldID].getObject();
+    }
+
+    ArrayList<Object> cachedList;
+
+    /**
+     * Get the values of the fields as an ArrayList.
+     * 
+     * @return The values of the fields as an ArrayList.
+     */
+    public ArrayList<Object> getFieldsAsList() {
+        if (!reset) {
+            parse();
+        }
+        if (cachedList == null) {
+            cachedList = new ArrayList<Object>();
+        } else {
+            cachedList.clear();
+        }
+        for (int i = 0; i < fields.length; i++) {
+            cachedList.add(uncheckedGetField(i));
+        }
+        return cachedList;
+    }
+
+    @Override
+    public Object getObject() {
+        return this;
+    }
+
+    protected boolean getParsed() {
+        return reset;
+    }
+
+    protected void setParsed(boolean parsed) {
+        this.reset = parsed;
+    }
+
+    protected LazyObject[] getFields() {
+        return fields;
+    }
+
+    protected void setFields(LazyObject[] fields) {
+        this.fields = fields;
+    }
+
+    protected boolean[] getFieldInited() {
+        return fieldVisited;
+    }
+
+    protected void setFieldInited(boolean[] fieldInited) {
+        this.fieldVisited = fieldInited;
+    }
+
+    /**
+     * Rebind an IFrameTupleReference to the struct.
+     */
+    public void init(IFrameTupleReference r) {
+        this.tuple = r;
+        reset = false;
+    }
+}
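
The parse-on-first-access pattern above (a one-time parse plus a per-field visited flag) can be illustrated with a self-contained sketch; `MemoizedRow`, its String-based "decoding", and the rebind method are hypothetical stand-ins for the object inspectors and the IFrameTupleReference:

```java
import java.util.Arrays;

// Sketch of LazyColumnar's memoization: fieldVisited[] guards a one-time
// per-field decode; rebinding a new row just clears the flags.
class MemoizedRow {
    private final String[] raw;          // stand-in for the frame tuple
    private final boolean[] fieldVisited;
    private final Object[] fields;

    MemoizedRow(String[] raw) {
        this.raw = raw;
        this.fieldVisited = new boolean[raw.length];
        this.fields = new Object[raw.length];
    }

    Object getField(int id) {
        if (!fieldVisited[id]) {         // decode at most once per row
            fieldVisited[id] = true;
            fields[id] = raw[id].toUpperCase(); // hypothetical "decode"
        }
        return fields[id];
    }

    void rebind(String[] newRow) {       // analogous to init(IFrameTupleReference)
        System.arraycopy(newRow, 0, raw, 0, raw.length);
        Arrays.fill(fieldVisited, false);
    }

    public static void main(String[] args) {
        MemoizedRow row = new MemoizedRow(new String[] { "a", "b" });
        System.out.println(row.getField(0)); // decoded now: A
        System.out.println(row.getField(0)); // cached: A
        row.rebind(new String[] { "c", "d" });
        System.out.println(row.getField(0)); // decoded again: C
    }
}
```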
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyDouble.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyDouble.java
new file mode 100644
index 0000000..1b2cc5a
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyDouble.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.io.DoubleWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyDoubleObjectInspector;
+
+/**
+ * LazyObject for storing a value of Double.
+ */
+public class LazyDouble extends LazyPrimitive<LazyDoubleObjectInspector, DoubleWritable> {
+
+    public LazyDouble(LazyDoubleObjectInspector oi) {
+        super(oi);
+        data = new DoubleWritable();
+    }
+
+    public LazyDouble(LazyDouble copy) {
+        super(copy);
+        data = new DoubleWritable(copy.data.get());
+    }
+
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        }
+        isNull = false;
+        assert (8 == length);
+        data.set(Double.longBitsToDouble(LazyUtils.byteArrayToLong(bytes, start)));
+    }
+
+}
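
LazyUtils.byteArrayToLong is defined elsewhere in this patch, so the round trip below reassembles the eight bytes inline, under the assumption that the layout is big-endian (which matches the DOUBLE branch of LazySerDe.serialize further down):

```java
// Round trip for the 8-byte double encoding: IEEE-754 bits via
// Double.doubleToLongBits, written MSB-first, decoded with longBitsToDouble.
public class DoubleBytesSketch {
    public static void main(String[] args) {
        double v = 3.14159;
        long bits = Double.doubleToLongBits(v);

        byte[] buf = new byte[8];
        for (int i = 0; i < 8; i++) {
            buf[i] = (byte) (bits >> (56 - 8 * i)); // big-endian: MSB first
        }

        long back = 0;
        for (int i = 0; i < 8; i++) {
            back = (back << 8) | (buf[i] & 0xFFL);  // mask to avoid sign extension
        }
        System.out.println(Double.longBitsToDouble(back)); // 3.14159
    }
}
```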
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFactory.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFactory.java
new file mode 100644
index 0000000..7caa9ed
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFactory.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyColumnarObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyListObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyMapObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyStructObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyBooleanObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyByteObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyDoubleObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyFloatObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyIntObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyLongObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyShortObjectInspector;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyStringObjectInspector;
+
+/**
+ * LazyFactory.
+ */
+public final class LazyFactory {
+
+    /**
+     * Create a lazy binary primitive class given the type name.
+     */
+    public static LazyPrimitive<?, ?> createLazyPrimitiveClass(PrimitiveObjectInspector oi) {
+        PrimitiveCategory p = oi.getPrimitiveCategory();
+        switch (p) {
+            case BOOLEAN:
+                return new LazyBoolean((LazyBooleanObjectInspector) oi);
+            case BYTE:
+                return new LazyByte((LazyByteObjectInspector) oi);
+            case SHORT:
+                return new LazyShort((LazyShortObjectInspector) oi);
+            case INT:
+                return new LazyInteger((LazyIntObjectInspector) oi);
+            case LONG:
+                return new LazyLong((LazyLongObjectInspector) oi);
+            case FLOAT:
+                return new LazyFloat((LazyFloatObjectInspector) oi);
+            case DOUBLE:
+                return new LazyDouble((LazyDoubleObjectInspector) oi);
+            case STRING:
+                return new LazyString((LazyStringObjectInspector) oi);
+            default:
+                throw new RuntimeException("Internal error: no LazyObject for " + p);
+        }
+    }
+
+    /**
+     * Create a hierarchical LazyObject based on the given typeInfo.
+     */
+    public static LazyObject<? extends ObjectInspector> createLazyObject(ObjectInspector oi) {
+        ObjectInspector.Category c = oi.getCategory();
+        switch (c) {
+            case PRIMITIVE:
+                return createLazyPrimitiveClass((PrimitiveObjectInspector) oi);
+            case MAP:
+                return new LazyMap((LazyMapObjectInspector) oi);
+            case LIST:
+                return new LazyArray((LazyListObjectInspector) oi);
+            case STRUCT: // check whether it is a top-level struct
+                if (oi instanceof LazyStructObjectInspector)
+                    return new LazyStruct((LazyStructObjectInspector) oi);
+                else
+                    return new LazyColumnar((LazyColumnarObjectInspector) oi);
+            default:
+                throw new RuntimeException("Hive LazySerDe Internal error.");
+        }
+    }
+
+    private LazyFactory() {
+        // prevent instantiation
+    }
+}
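
A hedged usage sketch for the factory: the inspector hierarchy is built the same way LazySerDe.initialize does below (via LazyUtils.getLazyObjectInspectorFromTypeInfo, defined elsewhere in this patch), so this assumes that class and hive-serde are on the classpath; the struct type string is a hypothetical example. Per the STRUCT branch above, a top-level struct should come back as LazyColumnar:

```java
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

import edu.uci.ics.hivesterix.serde.lazy.LazyFactory;
import edu.uci.ics.hivesterix.serde.lazy.LazyObject;
import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;

public class LazyFactorySketch {
    public static void main(String[] args) {
        // Hypothetical row type: two columns, int and string.
        TypeInfo rowType = TypeInfoUtils.getTypeInfoFromTypeString("struct<a:int,b:string>");
        ObjectInspector oi = LazyUtils.getLazyObjectInspectorFromTypeInfo(rowType, true);

        LazyObject<? extends ObjectInspector> row = LazyFactory.createLazyObject(oi);
        System.out.println(row.getClass().getSimpleName()); // expected: LazyColumnar
    }
}
```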
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFloat.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFloat.java
new file mode 100644
index 0000000..430ac2e
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyFloat.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.io.FloatWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyFloatObjectInspector;
+
+/**
+ * LazyObject for storing a value of Float.
+ */
+public class LazyFloat extends LazyPrimitive<LazyFloatObjectInspector, FloatWritable> {
+
+    public LazyFloat(LazyFloatObjectInspector oi) {
+        super(oi);
+        data = new FloatWritable();
+    }
+
+    public LazyFloat(LazyFloat copy) {
+        super(copy);
+        data = new FloatWritable(copy.data.get());
+    }
+
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        }
+        isNull = false;
+
+        assert (4 == length);
+        data.set(Float.intBitsToFloat(LazyUtils.byteArrayToInt(bytes, start)));
+    }
+
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyInteger.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyInteger.java
new file mode 100644
index 0000000..0765c4f
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyInteger.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.io.IntWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyIntObjectInspector;
+
+/**
+ * LazyObject for storing a value of Integer.
+ * <p>
+ * Part of the code is adapted from the Apache Harmony Project. As with the
+ * specification, this implementation relied on code laid out in <a
+ * href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's
+ * Delight, (Addison Wesley, 2002)</a> as well as <a
+ * href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
+ * </p>
+ */
+public class LazyInteger extends LazyPrimitive<LazyIntObjectInspector, IntWritable> {
+
+    public LazyInteger(LazyIntObjectInspector oi) {
+        super(oi);
+        data = new IntWritable();
+    }
+
+    public LazyInteger(LazyInteger copy) {
+        super(copy);
+        data = new IntWritable(copy.data.get());
+    }
+
+    /**
+     * The reusable vInt for decoding the integer.
+     */
+    VInt vInt = new LazyUtils.VInt();
+
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        }
+        isNull = false;
+
+        LazyUtils.readVInt(bytes, start, vInt);
+        assert (length == vInt.length);
+        if (length != vInt.length)
+            throw new IllegalStateException("parse int: length mismatch, expected " + vInt.length + " but get "
+                    + length);
+        data.set(vInt.value);
+    }
+}
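
LazyUtils.readVInt and the VInt holder are defined elsewhere in this patch; the sketch below illustrates only the pattern LazyInteger relies on, a reusable holder that reports both the decoded value and the bytes consumed, so the caller can check `length == vInt.length`. The byte format here is a plain base-128 varint chosen for illustration, not necessarily LazyUtils's actual encoding:

```java
// Hypothetical varint decode into a reusable holder, mirroring the
// LazyUtils.VInt pattern: value plus consumed-byte count, no per-call
// allocation, so the caller can validate the field length afterwards.
public class VIntHolderSketch {
    static final class VInt { int value; int length; }

    // Illustrative little-endian base-128 varint (7 data bits per byte).
    static void readVInt(byte[] bytes, int start, VInt out) {
        int value = 0, shift = 0, i = start;
        while (true) {
            byte b = bytes[i++];
            value |= (b & 0x7F) << shift;
            shift += 7;
            if ((b & 0x80) == 0) break; // high bit clear ends the varint
        }
        out.value = value;
        out.length = i - start;
    }

    public static void main(String[] args) {
        byte[] bytes = { (byte) 0xAC, 0x02 }; // 300 in this format
        VInt v = new VInt();                  // reused across rows in practice
        readVInt(bytes, 0, v);
        System.out.println(v.value + " (" + v.length + " bytes)"); // 300 (2 bytes)
    }
}
```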
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyLong.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyLong.java
new file mode 100644
index 0000000..e6b56c3
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyLong.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.io.LongWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VLong;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyLongObjectInspector;
+
+/**
+ * LazyObject for storing a value of Long.
+ * <p>
+ * Part of the code is adapted from the Apache Harmony Project. As with the
+ * specification, this implementation relied on code laid out in <a
+ * href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's
+ * Delight, (Addison Wesley, 2002)</a> as well as <a
+ * href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
+ * </p>
+ */
+public class LazyLong extends LazyPrimitive<LazyLongObjectInspector, LongWritable> {
+
+    public LazyLong(LazyLongObjectInspector oi) {
+        super(oi);
+        data = new LongWritable();
+    }
+
+    public LazyLong(LazyLong copy) {
+        super(copy);
+        data = new LongWritable(copy.data.get());
+    }
+
+    /**
+     * The reusable vLong for decoding the long.
+     */
+    VLong vLong = new LazyUtils.VLong();
+
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        }
+        isNull = false;
+
+        LazyUtils.readVLong(bytes, start, vLong);
+        assert (length == vLong.length);
+        if (length != vLong.length)
+            throw new IllegalStateException("parse long: length mismatch");
+        data.set(vLong.value);
+    }
+
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyMap.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyMap.java
new file mode 100644
index 0000000..9c7af2e
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyMap.java
@@ -0,0 +1,327 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import java.util.Arrays;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.RecordInfo;
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyMapObjectInspector;
+
+/**
+ * LazyMap is serialized as follows:
+ * bytes[] -> | A | b | c | b | c | ... | b | c |
+ * Section A is the null-bytes. Suppose the map has N key-value pairs, then
+ * there are (N*2+7)/8 bytes used as null-bytes. Each bit corresponds to a key
+ * or a value and it indicates whether that key or value is null (0) or not null
+ * (1).
+ * After A, all the bytes are actual serialized data of the map, which are
+ * key-value pairs. b represent the keys and c represent the values. Each of
+ * them is again a LazyObject.
+ */
+@SuppressWarnings("rawtypes")
+public class LazyMap extends LazyNonPrimitive<LazyMapObjectInspector> {
+
+    private static final Log LOG = LogFactory.getLog(LazyMap.class.getName());
+
+    /**
+     * Whether the data is already parsed or not.
+     */
+    boolean parsed;
+
+    /**
+     * The size of the map. Only valid when the data is parsed. -1 when the map
+     * is NULL.
+     */
+    int mapSize = 0;
+
+    /**
+     * The beginning position and length of key[i] and value[i]. Only valid when
+     * the data is parsed.
+     */
+    int[] keyStart;
+    int[] keyLength;
+    int[] valueStart;
+    int[] valueLength;
+    /**
+     * Whether valueObjects[i]/keyObjects[i] is initialized or not.
+     */
+    boolean[] keyInited;
+    boolean[] valueInited;
+
+    /**
+     * Whether valueObjects[i]/keyObjects[i] is null or not. This cannot be
+     * inferred from the length of the object. In particular, a 0-length string
+     * is not null.
+     */
+    boolean[] keyIsNull;
+    boolean[] valueIsNull;
+
+    /**
+     * The keys are stored in an array of LazyPrimitives.
+     */
+    LazyPrimitive<?, ?>[] keyObjects;
+    /**
+     * The values are stored in an array of LazyObjects. value[index] starts
+     * at KeyEnd[index] + 1 and ends before KeyStart[index+1] - 1.
+     */
+    LazyObject[] valueObjects;
+
+    protected LazyMap(LazyMapObjectInspector oi) {
+        super(oi);
+    }
+
+    /**
+     * Set the row data for this LazyMap.
+     * 
+     * @see LazyObject#init(byte[], int, int)
+     */
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        super.init(bytes, start, length);
+        parsed = false;
+    }
+
+    /**
+     * Adjust the size of arrays: keyStart, keyLength, valueStart, valueLength,
+     * keyInited, keyIsNull, valueInited, valueIsNull.
+     */
+    protected void adjustArraySize(int newSize) {
+        if (keyStart == null || keyStart.length < newSize) {
+            keyStart = new int[newSize];
+            keyLength = new int[newSize];
+            valueStart = new int[newSize];
+            valueLength = new int[newSize];
+            keyInited = new boolean[newSize];
+            keyIsNull = new boolean[newSize];
+            valueInited = new boolean[newSize];
+            valueIsNull = new boolean[newSize];
+            keyObjects = new LazyPrimitive<?, ?>[newSize];
+            valueObjects = new LazyObject[newSize];
+        }
+    }
+
+    boolean nullMapKey = false;
+    VInt vInt = new LazyUtils.VInt();
+    RecordInfo recordInfo = new LazyUtils.RecordInfo();
+
+    /**
+     * Parse the byte[] and fill keyStart, keyLength, keyIsNull valueStart,
+     * valueLength and valueIsNull.
+     */
+    private void parse() {
+
+        // get the VInt that represents the map size
+        LazyUtils.readVInt(bytes, start, vInt);
+        mapSize = vInt.value;
+        if (0 == mapSize) {
+            parsed = true;
+            return;
+        }
+
+        // adjust arrays
+        adjustArraySize(mapSize);
+
+        // find out the null-bytes
+        int mapByteStart = start + vInt.length;
+        int nullByteCur = mapByteStart;
+        int nullByteEnd = mapByteStart + (mapSize * 2 + 7) / 8;
+        int lastElementByteEnd = nullByteEnd;
+
+        // parsing the keys and values one by one
+        for (int i = 0; i < mapSize; i++) {
+            // parse a key
+            keyIsNull[i] = true;
+            if ((bytes[nullByteCur] & (1 << ((i * 2) % 8))) != 0) {
+                keyIsNull[i] = false;
+                LazyUtils.checkObjectByteInfo(((MapObjectInspector) oi).getMapKeyObjectInspector(), bytes,
+                        lastElementByteEnd, recordInfo);
+                keyStart[i] = lastElementByteEnd + recordInfo.elementOffset;
+                keyLength[i] = recordInfo.elementSize;
+                lastElementByteEnd = keyStart[i] + keyLength[i];
+            } else if (!nullMapKey) {
+                nullMapKey = true;
+                LOG.warn("Null map key encountered! Ignoring similar problems.");
+            }
+
+            // parse a value
+            valueIsNull[i] = true;
+            if ((bytes[nullByteCur] & (1 << ((i * 2 + 1) % 8))) != 0) {
+                valueIsNull[i] = false;
+                LazyUtils.checkObjectByteInfo(((MapObjectInspector) oi).getMapValueObjectInspector(), bytes,
+                        lastElementByteEnd, recordInfo);
+                valueStart[i] = lastElementByteEnd + recordInfo.elementOffset;
+                valueLength[i] = recordInfo.elementSize;
+                lastElementByteEnd = valueStart[i] + valueLength[i];
+            }
+
+            // move onto the next null byte
+            if (3 == (i % 4)) {
+                nullByteCur++;
+            }
+        }
+
+        Arrays.fill(keyInited, 0, mapSize, false);
+        Arrays.fill(valueInited, 0, mapSize, false);
+        parsed = true;
+    }
+
+    /**
+     * Get the value object with the index without checking parsed.
+     * 
+     * @param index
+     *            The index into the array starting from 0
+     */
+    private LazyObject uncheckedGetValue(int index) {
+        if (valueIsNull[index]) {
+            return null;
+        }
+        if (!valueInited[index]) {
+            valueInited[index] = true;
+            if (valueObjects[index] == null) {
+                valueObjects[index] = LazyFactory.createLazyObject(((MapObjectInspector) oi)
+                        .getMapValueObjectInspector());
+            }
+            valueObjects[index].init(bytes, valueStart[index], valueLength[index]);
+        }
+        return valueObjects[index];
+    }
+
+    /**
+     * Get the value in the map for the key.
+     * If there are multiple matches (which is possible in the serialized
+     * format), only the first one is returned.
+     * The most efficient way to get the value for the key is to serialize the
+     * key and then try to find it in the array. We do a linear search because
+     * in most cases a user only wants one or two values out of the map, and
+     * the cost of building up a HashMap is substantially higher.
+     * 
+     * @param key
+     *            The key object that we are looking for.
+     * @return The corresponding value object, or NULL if not found
+     */
+    public Object getMapValueElement(Object key) {
+        if (!parsed) {
+            parse();
+        }
+        // search for the key
+        for (int i = 0; i < mapSize; i++) {
+            LazyPrimitive<?, ?> lazyKeyI = uncheckedGetKey(i);
+            if (lazyKeyI == null) {
+                continue;
+            }
+            // getWritableObject() will convert LazyPrimitive to actual
+            // primitive
+            // writable objects.
+            Object keyI = lazyKeyI.getWritableObject();
+            if (keyI == null) {
+                continue;
+            }
+            if (keyI.equals(key)) {
+                // Got a match, return the value
+                LazyObject v = uncheckedGetValue(i);
+                return v == null ? null : v.getObject();
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Get the key object with the index without checking parsed.
+     * 
+     * @param index
+     *            The index into the array starting from 0
+     */
+    private LazyPrimitive<?, ?> uncheckedGetKey(int index) {
+        if (keyIsNull[index]) {
+            return null;
+        }
+        if (!keyInited[index]) {
+            keyInited[index] = true;
+            if (keyObjects[index] == null) {
+                // Keys are always primitive
+                keyObjects[index] = LazyFactory
+                        .createLazyPrimitiveClass((PrimitiveObjectInspector) ((MapObjectInspector) oi)
+                                .getMapKeyObjectInspector());
+            }
+            keyObjects[index].init(bytes, keyStart[index], keyLength[index]);
+        }
+        return keyObjects[index];
+    }
+
+    /**
+     * cachedMap is reused for different calls to getMap(). But each LazyMap has
+     * a separate cachedMap so we won't overwrite the data by accident.
+     */
+    LinkedHashMap<Object, Object> cachedMap;
+
+    /**
+     * Return the map object representing this LazyMap. Note that the keyObjects
+     * will be Writable primitive objects.
+     * 
+     * @return the map object
+     */
+    public Map<Object, Object> getMap() {
+        if (!parsed) {
+            parse();
+        }
+        if (cachedMap == null) {
+            // Use LinkedHashMap to provide deterministic order
+            cachedMap = new LinkedHashMap<Object, Object>();
+        } else {
+            cachedMap.clear();
+        }
+
+        // go through each element of the map
+        for (int i = 0; i < mapSize; i++) {
+            LazyPrimitive<?, ?> lazyKey = uncheckedGetKey(i);
+            if (lazyKey == null) {
+                continue;
+            }
+            Object key = lazyKey.getObject();
+            // do not overwrite if there are duplicate keys
+            if (key != null && !cachedMap.containsKey(key)) {
+                LazyObject lazyValue = uncheckedGetValue(i);
+                Object value = (lazyValue == null ? null : lazyValue.getObject());
+                cachedMap.put(key, value);
+            }
+        }
+        return cachedMap;
+    }
+
+    /**
+     * Get the size of the map represented by this LazyMap.
+     * 
+     * @return The size of the map
+     */
+    public int getMapSize() {
+        if (!parsed) {
+            parse();
+        }
+        return mapSize;
+    }
+}
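
The null-bitmap bookkeeping above, (N*2+7)/8 null bytes with bit 2i for key i and bit 2i+1 for value i, advancing one null byte after every fourth pair, in a JDK-only sketch; the map contents are hypothetical and the serialized key/value payloads are omitted:

```java
// Sketch of LazyMap's null bitmap: 2 bits per entry (key, value),
// packed 4 entries per byte, exactly (n*2+7)/8 bytes total.
public class MapNullBitmapSketch {
    public static void main(String[] args) {
        boolean[] keyNull   = { false, true,  false };
        boolean[] valueNull = { false, false, true  };
        int n = keyNull.length;

        byte[] nullBytes = new byte[(n * 2 + 7) / 8]; // 6 bits -> 1 byte here
        for (int i = 0; i < n; i++) {
            if (!keyNull[i])   nullBytes[(i * 2) / 8]     |= 1 << ((i * 2) % 8);
            if (!valueNull[i]) nullBytes[(i * 2 + 1) / 8] |= 1 << ((i * 2 + 1) % 8);
        }

        int cur = 0; // read back the way LazyMap.parse does
        for (int i = 0; i < n; i++) {
            boolean hasKey   = (nullBytes[cur] & (1 << ((i * 2) % 8))) != 0;
            boolean hasValue = (nullBytes[cur] & (1 << ((i * 2 + 1) % 8))) != 0;
            System.out.println("entry " + i + ": key "
                    + (hasKey ? "present" : "null") + ", value "
                    + (hasValue ? "present" : "null"));
            if (3 == (i % 4)) cur++; // next null byte after every 4th pair
        }
    }
}
```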
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyNonPrimitive.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyNonPrimitive.java
new file mode 100644
index 0000000..f7ae1e3
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyNonPrimitive.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * LazyNonPrimitive stores a non-primitive Object (struct, map, array) in a
+ * LazyObject.
+ */
+public abstract class LazyNonPrimitive<OI extends ObjectInspector> extends LazyObject<OI> {
+
+    protected byte[] bytes;
+    protected int start;
+    protected int length;
+
+    /**
+     * Create a LazyNonPrimitive object with the specified ObjectInspector.
+     * 
+     * @param oi
+     *            The ObjectInspector would have to have a hierarchy of
+     *            LazyObjectInspectors with the leaf nodes being
+     *            WritableObjectInspectors. It's used both for accessing the
+     *            type hierarchy of the complex object, as well as getting meta
+     *            information (separator, nullSequence, etc) when parsing the
+     *            lazy object.
+     */
+    protected LazyNonPrimitive(OI oi) {
+        super(oi);
+        bytes = null;
+        start = 0;
+        length = 0;
+    }
+
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (bytes == null) {
+            throw new RuntimeException("bytes cannot be null!");
+        }
+        this.bytes = bytes;
+        this.start = start;
+        this.length = length;
+        assert start >= 0;
+        assert start + length <= bytes.length;
+    }
+
+    @Override
+    public Object getObject() {
+        return this;
+    }
+
+    @Override
+    public int hashCode() {
+        return LazyUtils.hashBytes(bytes, start, length);
+    }
+
+    @Override
+    public void init(IFrameTupleReference tuple) {
+    }
+
+}
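
A small sketch of the init contract enforced above: the buffer must be non-null and the slice must lie within it. The hard null check always fires; the bounds checks are assertions, so the second call below only fails when the JVM runs with -ea:

```java
// The slice passed to init must satisfy: bytes != null, start >= 0,
// start + length <= bytes.length. Run with `java -ea` to see the
// out-of-bounds slice rejected by the assertion.
public class InitContractSketch {
    static void init(byte[] bytes, int start, int length) {
        if (bytes == null) {
            throw new RuntimeException("bytes cannot be null!");
        }
        assert start >= 0;
        assert start + length <= bytes.length;
    }

    public static void main(String[] args) {
        byte[] buf = new byte[4];
        init(buf, 0, 4); // ok: exactly the whole buffer
        init(buf, 2, 3); // AssertionError under -ea: 2 + 3 > 4
    }
}
```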
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyObject.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyObject.java
new file mode 100644
index 0000000..dc1dc60
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyObject.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * LazyObject stores an object in a range of bytes in a byte[].
+ * A LazyObject can represent any primitive object or hierarchical object like
+ * array, map or struct.
+ */
+public abstract class LazyObject<OI extends ObjectInspector> {
+
+    OI oi;
+
+    /**
+     * Create a LazyObject.
+     * 
+     * @param oi
+     *            Derived classes can access meta information about this Lazy
+     *            Object (e.g, separator, nullSequence, escaper) from it.
+     */
+    protected LazyObject(OI oi) {
+        this.oi = oi;
+    }
+
+    /**
+     * Set the data for this LazyObject. The byte[] can be reused across
+     * multiple rows.
+     * 
+     * @param bytes
+     *            The byte[] holding the data.
+     * @param start
+     *            The start position inside the bytes.
+     * @param length
+     *            The length of the data, starting from "start"
+     */
+    public abstract void init(byte[] bytes, int start, int length);
+
+    public abstract void init(IFrameTupleReference tuple);
+
+    /**
+     * If the LazyObject is a primitive Object, then deserialize it and return
+     * the actual primitive Object. Otherwise (array, map, struct), return this.
+     */
+    public abstract Object getObject();
+
+    @Override
+    public abstract int hashCode();
+
+    protected OI getInspector() {
+        return oi;
+    }
+
+    protected void setInspector(OI oi) {
+        this.oi = oi;
+    }
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyPrimitive.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyPrimitive.java
new file mode 100644
index 0000000..8139c65
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyPrimitive.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * LazyPrimitive stores a primitive Object in a LazyObject.
+ */
+public abstract class LazyPrimitive<OI extends ObjectInspector, T extends Writable> extends LazyObject<OI> {
+
+    LazyPrimitive(OI oi) {
+        super(oi);
+    }
+
+    LazyPrimitive(LazyPrimitive<OI, T> copy) {
+        super(copy.oi);
+        isNull = copy.isNull;
+    }
+
+    T data;
+    boolean isNull = false;
+
+    /**
+     * Returns the primitive object represented by this LazyObject. This is
+     * useful because it can make sure we have "null" for null objects.
+     */
+    @Override
+    public Object getObject() {
+        return isNull ? null : this;
+    }
+
+    public T getWritableObject() {
+        return isNull ? null : data;
+    }
+
+    @Override
+    public String toString() {
+        return isNull ? "null" : data.toString();
+    }
+
+    @Override
+    public int hashCode() {
+        return isNull ? 0 : data.hashCode();
+    }
+
+    @Override
+    public void init(IFrameTupleReference tuple) {
+    }
+
+}
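
Why the copy constructors of the concrete subclasses (LazyByte, LazyDouble, etc.) copy the Writable rather than share it: `data` is a reusable mutable holder, so aliasing it would leak values between objects. A JDK/Hadoop-only sketch:

```java
import org.apache.hadoop.io.IntWritable;

// Why LazyPrimitive copy constructors copy the Writable: `data` is a
// reusable mutable holder, so sharing it would alias values across objects.
public class WritableAliasSketch {
    public static void main(String[] args) {
        IntWritable shared = new IntWritable(1);

        IntWritable alias = shared;                       // what a shallow copy would do
        IntWritable copy = new IntWritable(shared.get()); // LazyInteger(LazyInteger copy) style

        shared.set(99);
        System.out.println(alias.get()); // 99 - aliased, the value leaked
        System.out.println(copy.get());  // 1  - independent copy is safe
    }
}
```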
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazySerDe.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazySerDe.java
new file mode 100644
index 0000000..05b82ba
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazySerDe.java
@@ -0,0 +1,460 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.ByteStream.Output;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+
+/**
+ * The LazySerDe class combines the lazy property of the LazySimpleSerDe class
+ * and the binary property of the BinarySortable class. Lazy means a field is
+ * not
+ * deserialized until required. Binary means a field is serialized in binary
+ * compact format.
+ */
+public class LazySerDe implements SerDe {
+
+    public static final Log LOG = LogFactory.getLog(LazySerDe.class.getName());
+
+    public LazySerDe() {
+    }
+
+    List<String> columnNames;
+    List<TypeInfo> columnTypes;
+
+    TypeInfo rowTypeInfo;
+    ObjectInspector cachedObjectInspector;
+
+    // The object for storing row data
+    LazyColumnar cachedLazyStruct;
+
+    /**
+     * Initialize the SerDe with configuration and table information.
+     */
+    @Override
+    public void initialize(Configuration conf, Properties tbl) throws SerDeException {
+        // Get column names and types
+        String columnNameProperty = tbl.getProperty(Constants.LIST_COLUMNS);
+        String columnTypeProperty = tbl.getProperty(Constants.LIST_COLUMN_TYPES);
+        if (columnNameProperty.length() == 0) {
+            columnNames = new ArrayList<String>();
+        } else {
+            columnNames = Arrays.asList(columnNameProperty.split(","));
+        }
+        if (columnTypeProperty.length() == 0) {
+            columnTypes = new ArrayList<TypeInfo>();
+        } else {
+            columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
+        }
+        assert (columnNames.size() == columnTypes.size());
+        // Create row related objects
+        rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
+        // Create the object inspector and the lazy binary struct object
+        cachedObjectInspector = LazyUtils.getLazyObjectInspectorFromTypeInfo(rowTypeInfo, true);
+        cachedLazyStruct = (LazyColumnar) LazyFactory.createLazyObject(cachedObjectInspector);
+        // output debug info
+        LOG.debug("LazySerDe initialized with: columnNames=" + columnNames + " columnTypes=" + columnTypes);
+    }
+
+    /**
+     * Returns the ObjectInspector for the row.
+     */
+    @Override
+    public ObjectInspector getObjectInspector() throws SerDeException {
+        return cachedObjectInspector;
+    }
+
+    /**
+     * Returns the Writable Class after serialization.
+     */
+    @Override
+    public Class<? extends Writable> getSerializedClass() {
+        return BytesWritable.class;
+    }
+
+    // The wrapper for byte array
+    ByteArrayRef byteArrayRef;
+
+    /**
+     * Deserialize a table record to a Lazy struct.
+     */
+    @SuppressWarnings("deprecation")
+    @Override
+    public Object deserialize(Writable field) throws SerDeException {
+        if (byteArrayRef == null) {
+            byteArrayRef = new ByteArrayRef();
+        }
+        if (field instanceof BytesWritable) {
+            BytesWritable b = (BytesWritable) field;
+            if (b.getSize() == 0) {
+                return null;
+            }
+            // For backward-compatibility with hadoop 0.17
+            byteArrayRef.setData(b.get());
+            cachedLazyStruct.init(byteArrayRef.getData(), 0, b.getSize());
+        } else if (field instanceof Text) {
+            Text t = (Text) field;
+            if (t.getLength() == 0) {
+                return null;
+            }
+            byteArrayRef.setData(t.getBytes());
+            cachedLazyStruct.init(byteArrayRef.getData(), 0, t.getLength());
+        } else {
+            throw new SerDeException(getClass().toString() + ": expects either BytesWritable or Text object!");
+        }
+        return cachedLazyStruct;
+    }
+
+    /**
+     * The reusable output buffer and serialize byte buffer.
+     */
+    BytesWritable serializeBytesWritable = new BytesWritable();
+    ByteStream.Output serializeByteStream = new ByteStream.Output();
+
+    /**
+     * Serialize an object to a byte buffer in a binary compact way.
+     */
+    @Override
+    public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
+        // check whether it is a struct record
+        serializeByteStream.reset();
+
+        if (objInspector.getCategory() != Category.STRUCT) {
+            // serialize the primitive object
+            serialize(serializeByteStream, obj, objInspector);
+        } else {
+            // serialize the row as a struct
+            serializeStruct(serializeByteStream, obj, (StructObjectInspector) objInspector);
+        }
+        // return the serialized bytes
+        serializeBytesWritable.set(serializeByteStream.getData(), 0, serializeByteStream.getCount());
+        return serializeBytesWritable;
+    }
+
+    boolean nullMapKey = false;
+
+    /**
+     * Serialize a struct object without writing the byte size. This function is
+     * shared by both row serialization and struct serialization.
+     * 
+     * @param byteStream
+     *            the byte stream storing the serialization data
+     * @param obj
+     *            the struct object to serialize
+     * @param soi
+     *            the struct object inspector
+     */
+    private void serializeStruct(Output byteStream, Object obj, StructObjectInspector soi) {
+        // do nothing for null struct
+        if (null == obj) {
+            return;
+        }
+        /*
+         * Interleave serializing one null byte and 8 struct fields in each
+         * round, in order to support data deserialization with different table
+         * schemas
+         */
+        List<? extends StructField> fields = soi.getAllStructFieldRefs();
+        int size = fields.size();
+        int lasti = 0;
+        byte nullByte = 0;
+        for (int i = 0; i < size; i++) {
+            // set bit to 1 if a field is not null
+            if (null != soi.getStructFieldData(obj, fields.get(i))) {
+                nullByte |= 1 << (i % 8);
+            }
+            // write the null byte every eight elements, or if this is the
+            // last element, and serialize the corresponding (up to 8) struct
+            // fields at the same time
+            if (7 == i % 8 || i == size - 1) {
+                byteStream.write(nullByte);
+                for (int j = lasti; j <= i; j++) {
+                    serialize(byteStream, soi.getStructFieldData(obj, fields.get(j)), fields.get(j)
+                            .getFieldObjectInspector());
+                }
+                lasti = i + 1;
+                nullByte = 0;
+            }
+        }
+    }
+
+    /**
+     * A recursive function that serialize an object to a byte buffer based on
+     * its object inspector.
+     * 
+     * @param byteStream
+     *            the byte stream storing the serialization data
+     * @param obj
+     *            the object to serialize
+     * @param objInspector
+     *            the object inspector
+     */
+    private void serialize(Output byteStream, Object obj, ObjectInspector objInspector) {
+
+        // do nothing for null object
+        if (null == obj) {
+            return;
+        }
+
+        switch (objInspector.getCategory()) {
+            case PRIMITIVE: {
+                PrimitiveObjectInspector poi = (PrimitiveObjectInspector) objInspector;
+                switch (poi.getPrimitiveCategory()) {
+                    case VOID: {
+                        return;
+                    }
+                    case BOOLEAN: {
+                        boolean v = ((BooleanObjectInspector) poi).get(obj);
+                        byteStream.write((byte) (v ? 1 : 0));
+                        return;
+                    }
+                    case BYTE: {
+                        ByteObjectInspector boi = (ByteObjectInspector) poi;
+                        byte v = boi.get(obj);
+                        byteStream.write(v);
+                        return;
+                    }
+                    case SHORT: {
+                        ShortObjectInspector spoi = (ShortObjectInspector) poi;
+                        short v = spoi.get(obj);
+                        byteStream.write((byte) (v >> 8));
+                        byteStream.write((byte) (v));
+                        return;
+                    }
+                    case INT: {
+                        IntObjectInspector ioi = (IntObjectInspector) poi;
+                        int v = ioi.get(obj);
+                        LazyUtils.writeVInt(byteStream, v);
+                        return;
+                    }
+                    case LONG: {
+                        LongObjectInspector loi = (LongObjectInspector) poi;
+                        long v = loi.get(obj);
+                        LazyUtils.writeVLong(byteStream, v);
+                        return;
+                    }
+                    case FLOAT: {
+                        FloatObjectInspector foi = (FloatObjectInspector) poi;
+                        int v = Float.floatToIntBits(foi.get(obj));
+                        byteStream.write((byte) (v >> 24));
+                        byteStream.write((byte) (v >> 16));
+                        byteStream.write((byte) (v >> 8));
+                        byteStream.write((byte) (v));
+                        return;
+                    }
+                    case DOUBLE: {
+                        DoubleObjectInspector doi = (DoubleObjectInspector) poi;
+                        long v = Double.doubleToLongBits(doi.get(obj));
+                        byteStream.write((byte) (v >> 56));
+                        byteStream.write((byte) (v >> 48));
+                        byteStream.write((byte) (v >> 40));
+                        byteStream.write((byte) (v >> 32));
+                        byteStream.write((byte) (v >> 24));
+                        byteStream.write((byte) (v >> 16));
+                        byteStream.write((byte) (v >> 8));
+                        byteStream.write((byte) (v));
+                        return;
+                    }
+                    case STRING: {
+                        StringObjectInspector soi = (StringObjectInspector) poi;
+                        Text t = soi.getPrimitiveWritableObject(obj);
+                        /* write byte size of the string which is a vint */
+                        int length = t.getLength();
+                        LazyUtils.writeVInt(byteStream, length);
+                        /* write string itself */
+                        byte[] data = t.getBytes();
+                        byteStream.write(data, 0, length);
+                        return;
+                    }
+                    default: {
+                        throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory());
+                    }
+                }
+            }
+            case LIST: {
+                ListObjectInspector loi = (ListObjectInspector) objInspector;
+                ObjectInspector eoi = loi.getListElementObjectInspector();
+
+                // 1/ reserve space for the byte size of the list,
+                // which is an integer and takes four bytes
+                int byteSizeStart = byteStream.getCount();
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                int listStart = byteStream.getCount();
+
+                // 2/ write the size of the list as a VInt
+                int size = loi.getListLength(obj);
+                LazyUtils.writeVInt(byteStream, size);
+
+                // 3/ write the null bytes
+                byte nullByte = 0;
+                for (int eid = 0; eid < size; eid++) {
+                    // set the bit to 1 if an element is not null
+                    if (null != loi.getListElement(obj, eid)) {
+                        nullByte |= 1 << (eid % 8);
+                    }
+                    // store the byte every eight elements or
+                    // if this is the last element
+                    if (7 == eid % 8 || eid == size - 1) {
+                        byteStream.write(nullByte);
+                        nullByte = 0;
+                    }
+                }
+
+                // 4/ write element by element from the list
+                for (int eid = 0; eid < size; eid++) {
+                    serialize(byteStream, loi.getListElement(obj, eid), eoi);
+                }
+
+                // 5/ update the list byte size
+                int listEnd = byteStream.getCount();
+                int listSize = listEnd - listStart;
+                byte[] bytes = byteStream.getData();
+                bytes[byteSizeStart] = (byte) (listSize >> 24);
+                bytes[byteSizeStart + 1] = (byte) (listSize >> 16);
+                bytes[byteSizeStart + 2] = (byte) (listSize >> 8);
+                bytes[byteSizeStart + 3] = (byte) (listSize);
+
+                return;
+            }
+            case MAP: {
+                MapObjectInspector moi = (MapObjectInspector) objInspector;
+                ObjectInspector koi = moi.getMapKeyObjectInspector();
+                ObjectInspector voi = moi.getMapValueObjectInspector();
+                Map<?, ?> map = moi.getMap(obj);
+
+                // 1/ reserve space for the byte size of the map,
+                // which is an integer and takes four bytes
+                int byteSizeStart = byteStream.getCount();
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                int mapStart = byteStream.getCount();
+
+                // 2/ write the size of the map as a VInt
+                int size = map.size();
+                LazyUtils.writeVInt(byteStream, size);
+
+                // 3/ write the null bytes
+                int b = 0;
+                byte nullByte = 0;
+                for (Map.Entry<?, ?> entry : map.entrySet()) {
+                    // set the bit to 1 if a key is not null
+                    if (null != entry.getKey()) {
+                        nullByte |= 1 << (b % 8);
+                    } else if (!nullMapKey) {
+                        nullMapKey = true;
+                        LOG.warn("Null map key encountered! Ignoring similar problems.");
+                    }
+                    b++;
+                    // set the bit to 1 if a value is not null
+                    if (null != entry.getValue()) {
+                        nullByte |= 1 << (b % 8);
+                    }
+                    b++;
+                    // write the byte to stream every 4 key-value pairs
+                    // or if this is the last key-value pair
+                    if (0 == b % 8 || b == size * 2) {
+                        byteStream.write(nullByte);
+                        nullByte = 0;
+                    }
+                }
+
+                // 4/ write key-value pairs one by one
+                for (Map.Entry<?, ?> entry : map.entrySet()) {
+                    serialize(byteStream, entry.getKey(), koi);
+                    serialize(byteStream, entry.getValue(), voi);
+                }
+
+                // 5/ update the byte size of the map
+                int mapEnd = byteStream.getCount();
+                int mapSize = mapEnd - mapStart;
+                byte[] bytes = byteStream.getData();
+                bytes[byteSizeStart] = (byte) (mapSize >> 24);
+                bytes[byteSizeStart + 1] = (byte) (mapSize >> 16);
+                bytes[byteSizeStart + 2] = (byte) (mapSize >> 8);
+                bytes[byteSizeStart + 3] = (byte) (mapSize);
+
+                return;
+            }
+            case STRUCT: {
+                // 1/ reserve space for the byte size of the struct,
+                // which is an integer and takes four bytes
+                int byteSizeStart = byteStream.getCount();
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                byteStream.write((byte) 0);
+                int structStart = byteStream.getCount();
+
+                // 2/ serialize the struct
+                serializeStruct(byteStream, obj, (StructObjectInspector) objInspector);
+
+                // 3/ update the byte size of the struct
+                int structEnd = byteStream.getCount();
+                int structSize = structEnd - structStart;
+                byte[] bytes = byteStream.getData();
+                bytes[byteSizeStart] = (byte) (structSize >> 24);
+                bytes[byteSizeStart + 1] = (byte) (structSize >> 16);
+                bytes[byteSizeStart + 2] = (byte) (structSize >> 8);
+                bytes[byteSizeStart + 3] = (byte) (structSize);
+
+                return;
+            }
+            default: {
+                throw new RuntimeException("Unrecognized type: " + objInspector.getCategory());
+            }
+        }
+    }
+}
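
Aside: the LIST, MAP and STRUCT branches above all share one idiom - reserve four bytes, write the body, then back-patch the big-endian byte size. A minimal standalone sketch of that idiom, with a plain byte array standing in for Hive's ByteStream.Output (the buffer handling is illustrative, not code from this patch):

    public class SizePrefixSketch {
        public static void main(String[] args) {
            byte[] buf = new byte[16];
            int byteSizeStart = 0;              // the reserved 4-byte slot
            int bodyStart = byteSizeStart + 4;
            byte[] body = { 1, 2, 3, 4, 5 };    // stands in for vint size + null bytes + elements
            System.arraycopy(body, 0, buf, bodyStart, body.length);
            int size = body.length;             // listEnd - listStart in the code above
            buf[byteSizeStart] = (byte) (size >> 24);
            buf[byteSizeStart + 1] = (byte) (size >> 16);
            buf[byteSizeStart + 2] = (byte) (size >> 8);
            buf[byteSizeStart + 3] = (byte) size;
            // buf now starts with 00 00 00 05, followed by the body bytes
        }
    }
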
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyShort.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyShort.java
new file mode 100644
index 0000000..f493b37
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyShort.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyShortObjectInspector;
+
+/**
+ * LazyObject for storing a value of Short.
+ * <p>
+ * Part of the code is adapted from Apache Harmony Project. As with the specification, this implementation relied on code laid out in <a href="http://www.hackersdelight.org/">Henry S. Warren, Jr.'s Hacker's Delight, (Addison Wesley, 2002)</a> as well as <a href="http://aggregate.org/MAGIC/">The Aggregate's Magic Algorithms</a>.
+ * </p>
+ */
+public class LazyShort extends LazyPrimitive<LazyShortObjectInspector, ShortWritable> {
+
+    public LazyShort(LazyShortObjectInspector oi) {
+        super(oi);
+        data = new ShortWritable();
+    }
+
+    public LazyShort(LazyShort copy) {
+        super(copy);
+        data = new ShortWritable(copy.data.get());
+    }
+
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        }
+        isNull = false;
+
+        assert (2 == length);
+        data.set(LazyUtils.byteArrayToShort(bytes, start));
+    }
+
+}
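
Aside: LazyShort.init above asserts a fixed two-byte, big-endian field. A round-trip sketch of that layout (the decode mirrors LazyUtils.byteArrayToShort from this patch; the encode side is an assumption for illustration):

    public class ShortCodecSketch {
        // mirrors LazyUtils.byteArrayToShort
        static short decode(byte[] b, int offset) {
            short value = 0;
            value += (b[offset] & 0xFF) << 8;
            value += (b[offset + 1] & 0xFF);
            return value;
        }

        public static void main(String[] args) {
            short s = -1234;
            byte[] b = { (byte) (s >> 8), (byte) s }; // big-endian, exactly two bytes
            System.out.println(decode(b, 0));         // prints -1234
        }
    }
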
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyString.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyString.java
new file mode 100644
index 0000000..0293af8
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyString.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import org.apache.hadoop.io.Text;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.VInt;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyStringObjectInspector;
+
+/**
+ * LazyObject for storing a value of String.
+ */
+public class LazyString extends LazyPrimitive<LazyStringObjectInspector, Text> {
+
+    public LazyString(LazyStringObjectInspector oi) {
+        super(oi);
+        data = new Text();
+    }
+
+    public LazyString(LazyString copy) {
+        super(copy);
+        data = new Text(copy.data);
+    }
+
+    VInt vInt = new LazyUtils.VInt();
+
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        if (length == 0) {
+            isNull = true;
+            return;
+        }
+        isNull = false;
+
+        // get the byte length of the string
+        LazyUtils.readVInt(bytes, start, vInt);
+        if (vInt.value + vInt.length != length)
+            throw new IllegalStateException("parse string: length mismatch, expected " + (vInt.value + vInt.length)
+                    + " but got " + length);
+        assert (length - vInt.length >= 0);
+        data.set(bytes, start + vInt.length, length - vInt.length);
+    }
+
+}
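
Aside: LazyString.init expects a VInt byte length followed by exactly that many UTF-8 bytes. A sketch of that layout for a short string - lengths in [-112, 127] occupy a single VInt byte, so the prefix below is just the length itself (a simplification; WritableUtils handles the general case):

    import java.nio.charset.StandardCharsets;

    public class StringLayoutSketch {
        public static void main(String[] args) {
            byte[] utf8 = "hive".getBytes(StandardCharsets.UTF_8);
            byte[] field = new byte[1 + utf8.length];
            field[0] = (byte) utf8.length;  // one-byte VInt for small lengths
            System.arraycopy(utf8, 0, field, 1, utf8.length);

            int vIntLength = 1;             // decodeVIntSize(field[0]) for one-byte VInts
            int stringLength = field[0];
            if (stringLength + vIntLength != field.length)  // same check as init()
                throw new IllegalStateException("length mismatch");
            System.out.println(new String(field, vIntLength, stringLength, StandardCharsets.UTF_8));
        }
    }
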
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyStruct.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyStruct.java
new file mode 100644
index 0000000..47e95e4
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyStruct.java
@@ -0,0 +1,234 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils.RecordInfo;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyStructObjectInspector;
+
+/**
+ * LazyStruct is serialized as follows: start A B A B A B end bytes[] ->
+ * |-----|---------|--- ... ---|-----|---------|
+ * Section A is one null-byte, corresponding to eight struct fields in Section
+ * B. Each bit indicates whether the corresponding field is null (0) or not null
+ * (1). Each field is a LazyObject.
+ * Following B, there is another Section A and Section B. This pattern repeats
+ * until all struct fields are serialized.
+ */
+public class LazyStruct extends LazyNonPrimitive<LazyStructObjectInspector> {
+
+    private static Log LOG = LogFactory.getLog(LazyStruct.class.getName());
+
+    /**
+     * Whether the data is already parsed or not.
+     */
+    boolean parsed;
+
+    /**
+     * The fields of the struct.
+     */
+    @SuppressWarnings("rawtypes")
+    LazyObject[] fields;
+
+    /**
+     * Whether a field is initialized or not.
+     */
+    boolean[] fieldInited;
+
+    /**
+     * Whether a field is null or not. A length of 0 does not mean the field
+     * is null; in particular, a 0-length string is not null.
+     */
+    boolean[] fieldIsNull;
+
+    /**
+     * The start positions and lengths of struct fields. Only valid when the
+     * data is parsed.
+     */
+    int[] fieldStart;
+    int[] fieldLength;
+
+    /**
+     * Construct a LazyStruct object with an ObjectInspector.
+     */
+    protected LazyStruct(LazyStructObjectInspector oi) {
+        super(oi);
+    }
+
+    @Override
+    public void init(byte[] bytes, int start, int length) {
+        super.init(bytes, start, length);
+        parsed = false;
+    }
+
+    RecordInfo recordInfo = new LazyUtils.RecordInfo();
+    boolean missingFieldWarned = false;
+    boolean extraFieldWarned = false;
+
+    /**
+     * Parse the byte[] and fill fieldStart, fieldLength, fieldInited and
+     * fieldIsNull.
+     */
+    private void parse() {
+
+        List<? extends StructField> fieldRefs = ((StructObjectInspector) oi).getAllStructFieldRefs();
+
+        if (fields == null) {
+            fields = new LazyObject[fieldRefs.size()];
+            for (int i = 0; i < fields.length; i++) {
+                ObjectInspector insp = fieldRefs.get(i).getFieldObjectInspector();
+                fields[i] = insp == null ? null : LazyFactory.createLazyObject(insp);
+            }
+            fieldInited = new boolean[fields.length];
+            fieldIsNull = new boolean[fields.length];
+            fieldStart = new int[fields.length];
+            fieldLength = new int[fields.length];
+        }
+
+        /**
+         * Note that one null byte is followed by up to eight fields, then
+         * another null byte and more fields, and so on.
+         */
+
+        int fieldId = 0;
+        int structByteEnd = start + length;
+
+        byte nullByte = bytes[start];
+        int lastFieldByteEnd = start + 1;
+        // Go through all bytes in the byte[]
+        for (int i = 0; i < fields.length; i++) {
+            fieldIsNull[i] = true;
+            if ((nullByte & (1 << (i % 8))) != 0) {
+                fieldIsNull[i] = false;
+                LazyUtils.checkObjectByteInfo(fieldRefs.get(i).getFieldObjectInspector(), bytes, lastFieldByteEnd,
+                        recordInfo);
+                fieldStart[i] = lastFieldByteEnd + recordInfo.elementOffset;
+                fieldLength[i] = recordInfo.elementSize;
+                lastFieldByteEnd = fieldStart[i] + fieldLength[i];
+            }
+
+            // count how many fields there are
+            if (lastFieldByteEnd <= structByteEnd) {
+                fieldId++;
+            }
+            // next byte is a null byte if there are more bytes to go
+            if (7 == (i % 8)) {
+                if (lastFieldByteEnd < structByteEnd) {
+                    nullByte = bytes[lastFieldByteEnd];
+                    lastFieldByteEnd++;
+                } else {
+                    // otherwise all null afterwards
+                    nullByte = 0;
+                    lastFieldByteEnd++;
+                }
+            }
+        }
+
+        // Extra bytes at the end?
+        if (!extraFieldWarned && lastFieldByteEnd < structByteEnd) {
+            extraFieldWarned = true;
+            LOG.warn("Extra bytes detected at the end of the row! Ignoring similar " + "problems.");
+        }
+
+        // Missing fields?
+        if (!missingFieldWarned && lastFieldByteEnd > structByteEnd) {
+            missingFieldWarned = true;
+            LOG.warn("Missing fields! Expected " + fields.length + " fields but " + "only got " + fieldId
+                    + "! Ignoring similar problems.");
+        }
+
+        Arrays.fill(fieldInited, false);
+        parsed = true;
+    }
+
+    /**
+     * Get one field out of the struct.
+     * If the field is a primitive field, return the actual object; otherwise
+     * return the LazyObject. This is because PrimitiveObjectInspector does not
+     * have control over the object used by the user - the user simply uses the
+     * Object directly instead of going through
+     * PrimitiveObjectInspector.get(Object).
+     * 
+     * @param fieldID
+     *            The field ID
+     * @return The field as a LazyObject
+     */
+    public Object getField(int fieldID) {
+        if (!parsed) {
+            parse();
+        }
+        return uncheckedGetField(fieldID);
+    }
+
+    /**
+     * Get the field out of the row without checking parsed. This is called by
+     * both getField and getFieldsAsList.
+     * 
+     * @param fieldID
+     *            The id of the field starting from 0.
+     * @return The value of the field
+     */
+    private Object uncheckedGetField(int fieldID) {
+        // Return null directly for null fields; the underlying LazyObject is
+        // only initialized for non-null fields.
+        if (fieldIsNull[fieldID]) {
+            return null;
+        }
+        if (!fieldInited[fieldID]) {
+            fieldInited[fieldID] = true;
+            fields[fieldID].init(bytes, fieldStart[fieldID], fieldLength[fieldID]);
+        }
+        return fields[fieldID].getObject();
+    }
+
+    ArrayList<Object> cachedList;
+
+    /**
+     * Get the values of the fields as an ArrayList.
+     * 
+     * @return The values of the fields as an ArrayList.
+     */
+    public ArrayList<Object> getFieldsAsList() {
+        if (!parsed) {
+            parse();
+        }
+        if (cachedList == null) {
+            cachedList = new ArrayList<Object>();
+        } else {
+            cachedList.clear();
+        }
+        for (int i = 0; i < fields.length; i++) {
+            cachedList.add(uncheckedGetField(i));
+        }
+        return cachedList;
+    }
+
+    @Override
+    public Object getObject() {
+        return this;
+    }
+}
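
Aside: the bitmap test in parse() above is the core of the null-byte layout described in the class comment. A standalone sketch, assuming a ten-field struct where only fields 0 and 8 are non-null (two null bytes, one per group of eight fields):

    public class NullByteSketch {
        public static void main(String[] args) {
            byte[] nullBytes = { (byte) 0x01, (byte) 0x01 };
            int fieldCount = 10;
            for (int i = 0; i < fieldCount; i++) {
                byte nullByte = nullBytes[i / 8];
                boolean nonNull = (nullByte & (1 << (i % 8))) != 0;  // same test as parse()
                System.out.println("field " + i + " non-null? " + nonNull);
            }
        }
    }
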
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyUtils.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyUtils.java
new file mode 100644
index 0000000..6554ccc
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/LazyUtils.java
@@ -0,0 +1,503 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.hadoop.hive.serde2.ByteStream.Output;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.io.WritableUtils;
+
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.LazyObjectInspectorFactory;
+import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+ * LazyUtils.
+ */
+public final class LazyUtils {
+
+    /**
+     * Convert the byte array to an int starting from the given offset
+     * (adapted from code by aeden on DZone Snippets).
+     * 
+     * @param b
+     *            the byte array
+     * @param offset
+     *            the array offset
+     * @return the integer
+     */
+    public static int byteArrayToInt(byte[] b, int offset) {
+        int value = 0;
+        for (int i = 0; i < 4; i++) {
+            int shift = (4 - 1 - i) * 8;
+            value += (b[i + offset] & 0x000000FF) << shift;
+        }
+        return value;
+    }
+
+    /**
+     * Convert the byte array to a long starting from the given offset.
+     * 
+     * @param b
+     *            the byte array
+     * @param offset
+     *            the array offset
+     * @return the long
+     */
+    public static long byteArrayToLong(byte[] b, int offset) {
+        long value = 0;
+        for (int i = 0; i < 8; i++) {
+            int shift = (8 - 1 - i) * 8;
+            value += ((long) (b[i + offset] & 0x00000000000000FF)) << shift;
+        }
+        return value;
+    }
+
+    /**
+     * Convert the byte array to a short starting from the given offset.
+     * 
+     * @param b
+     *            the byte array
+     * @param offset
+     *            the array offset
+     * @return the short
+     */
+    public static short byteArrayToShort(byte[] b, int offset) {
+        short value = 0;
+        value += (b[offset] & 0x000000FF) << 8;
+        value += (b[offset + 1] & 0x000000FF);
+        return value;
+    }
+
+    /**
+     * Record is the unit in which data is serialized. A record has two parts:
+     * the first part stores the size of the element and the second part stores
+     * the element itself. size element record ->
+     * |----|-------------------------|
+     * A RecordInfo stores two facts about a record: the size of the "size"
+     * part, which is the element offset, and the size of the element part,
+     * which is the element size.
+     */
+    public static class RecordInfo {
+        public RecordInfo() {
+            elementOffset = 0;
+            elementSize = 0;
+        }
+
+        public byte elementOffset;
+        public int elementSize;
+
+        @Override
+        public String toString() {
+            return "(" + elementOffset + ", " + elementSize + ")";
+        }
+    }
+
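+    // Note: this shared scratch object makes checkObjectByteInfo non-reentrant;
+    // single-threaded use per call site is assumed here.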
+    static VInt vInt = new LazyUtils.VInt();
+
+    /**
+     * Check a particular field and set its size and offset in bytes based on
+     * the field type and the bytes arrays.
+     * For void, boolean, byte, short, float and double, there is no offset and
+     * the size is fixed. Int and long are stored as VInts, so the size is
+     * decoded from the VInt's first byte. For string, the VInt header gives
+     * both the offset and the byte length. For map, list and struct, the first
+     * four bytes store the size, so the offset is 4 and the size is computed
+     * by concatenating those four bytes, read starting at the given offset.
+     * 
+     * @param objectInspector
+     *            object inspector of the field
+     * @param bytes
+     *            bytes arrays store the table row
+     * @param offset
+     *            offset of this field
+     * @param recordInfo
+     *            modify this byteinfo object and return it
+     */
+    public static void checkObjectByteInfo(ObjectInspector objectInspector, byte[] bytes, int offset,
+            RecordInfo recordInfo) {
+        Category category = objectInspector.getCategory();
+        switch (category) {
+            case PRIMITIVE:
+                PrimitiveCategory primitiveCategory = ((PrimitiveObjectInspector) objectInspector)
+                        .getPrimitiveCategory();
+                switch (primitiveCategory) {
+                    case VOID:
+                        recordInfo.elementOffset = 0;
+                        recordInfo.elementSize = 0;
+                        break;
+                    case BOOLEAN:
+                    case BYTE:
+                        recordInfo.elementOffset = 0;
+                        recordInfo.elementSize = 1;
+                        break;
+                    case SHORT:
+                        recordInfo.elementOffset = 0;
+                        recordInfo.elementSize = 2;
+                        break;
+                    case FLOAT:
+                        recordInfo.elementOffset = 0;
+                        recordInfo.elementSize = 4;
+                        break;
+                    case DOUBLE:
+                        recordInfo.elementOffset = 0;
+                        recordInfo.elementSize = 8;
+                        break;
+                    case INT:
+                        recordInfo.elementOffset = 0;
+                        recordInfo.elementSize = WritableUtils.decodeVIntSize(bytes[offset]);
+                        break;
+                    case LONG:
+                        recordInfo.elementOffset = 0;
+                        recordInfo.elementSize = WritableUtils.decodeVIntSize(bytes[offset]);
+                        break;
+                    case STRING:
+                        // using vint instead of 4 bytes
+                        LazyUtils.readVInt(bytes, offset, vInt);
+                        recordInfo.elementOffset = vInt.length;
+                        recordInfo.elementSize = vInt.value;
+                        break;
+                    default: {
+                        throw new RuntimeException("Unrecognized primitive type: " + primitiveCategory);
+                    }
+                }
+                break;
+            case LIST:
+            case MAP:
+            case STRUCT:
+                recordInfo.elementOffset = 4;
+                recordInfo.elementSize = LazyUtils.byteArrayToInt(bytes, offset);
+                break;
+            default: {
+                throw new RuntimeException("Unrecognized non-primitive type: " + category);
+            }
+        }
+    }
+
+    /**
+     * A zero-compressed encoded long.
+     */
+    public static class VLong {
+        public VLong() {
+            value = 0;
+            length = 0;
+        }
+
+        public long value;
+        public byte length;
+    }
+
+    /**
+     * Reads a zero-compressed encoded long from a byte array and returns it.
+     * 
+     * @param bytes
+     *            the byte array
+     * @param offset
+     *            offset of the array to read from
+     * @param vlong
+     *            storing the deserialized long and its size in byte
+     */
+    public static void readVLong(byte[] bytes, int offset, VLong vlong) {
+        byte firstByte = bytes[offset];
+        vlong.length = (byte) WritableUtils.decodeVIntSize(firstByte);
+        if (vlong.length == 1) {
+            vlong.value = firstByte;
+            return;
+        }
+        long i = 0;
+        for (int idx = 0; idx < vlong.length - 1; idx++) {
+            byte b = bytes[offset + 1 + idx];
+            i = i << 8;
+            i = i | (b & 0xFF);
+        }
+        vlong.value = (WritableUtils.isNegativeVInt(firstByte) ? (i ^ -1L) : i);
+    }
+
+    /**
+     * A zero-compressed encoded integer.
+     */
+    public static class VInt implements Serializable {
+        private static final long serialVersionUID = 1L;
+
+        public VInt() {
+            value = 0;
+            length = 0;
+        }
+
+        public int value;
+        public byte length;
+    }
+
+    /**
+     * Reads a zero-compressed encoded int from a byte array and returns it.
+     * 
+     * @param bytes
+     *            the byte array
+     * @param offset
+     *            offset of the array to read from
+     * @param vInt
+     *            storing the deserialized int and its size in byte
+     */
+    public static void readVInt(byte[] bytes, int offset, VInt vInt) {
+        byte firstByte = bytes[offset];
+        vInt.length = (byte) WritableUtils.decodeVIntSize(firstByte);
+        if (vInt.length == 1) {
+            vInt.value = firstByte;
+            return;
+        }
+        int i = 0;
+        for (int idx = 0; idx < vInt.length - 1; idx++) {
+            byte b = bytes[offset + 1 + idx];
+            i = i << 8;
+            i = i | (b & 0xFF);
+        }
+        vInt.value = (WritableUtils.isNegativeVInt(firstByte) ? (i ^ -1) : i);
+    }
+
+    /**
+     * Writes a zero-compressed encoded int to a byte array.
+     * 
+     * @param byteStream
+     *            the byte array/stream
+     * @param i
+     *            the int
+     */
+    public static void writeVInt(Output byteStream, int i) {
+        writeVLong(byteStream, i);
+    }
+
+    /**
+     * Write a zero-compressed encoded long to a byte array.
+     * 
+     * @param byteStream
+     *            the byte array/stream
+     * @param l
+     *            the long
+     */
+    public static void writeVLong(Output byteStream, long l) {
+        if (l >= -112 && l <= 127) {
+            byteStream.write((byte) l);
+            return;
+        }
+
+        int len = -112;
+        if (l < 0) {
+            l ^= -1L; // take one's complement
+            len = -120;
+        }
+
+        long tmp = l;
+        while (tmp != 0) {
+            tmp = tmp >> 8;
+            len--;
+        }
+
+        byteStream.write((byte) len);
+
+        len = (len < -120) ? -(len + 120) : -(len + 112);
+
+        for (int idx = len; idx != 0; idx--) {
+            int shiftbits = (idx - 1) * 8;
+            long mask = 0xFFL << shiftbits;
+            byteStream.write((byte) ((l & mask) >> shiftbits));
+        }
+    }
+
+    static Map<TypeInfo, ObjectInspector> cachedLazyObjectInspector = new ConcurrentHashMap<TypeInfo, ObjectInspector>();
+
+    /**
+     * Returns the lazy object inspector that can be used to inspect a lazy
+     * object of that typeInfo.
+     * For primitive types, we use the standard writable object inspector.
+     */
+    public static ObjectInspector getLazyObjectInspectorFromTypeInfo(TypeInfo typeInfo, boolean topLevel) {
+        if (typeInfo == null)
+            throw new IllegalStateException("illegal type null ");
+        ObjectInspector result = cachedLazyObjectInspector.get(typeInfo);
+        if (result == null) {
+            switch (typeInfo.getCategory()) {
+                case PRIMITIVE: {
+                    result = PrimitiveObjectInspectorFactory
+                            .getPrimitiveLazyObjectInspector(((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory());
+                    break;
+                }
+                case LIST: {
+                    ObjectInspector elementObjectInspector = getLazyObjectInspectorFromTypeInfo(
+                            ((ListTypeInfo) typeInfo).getListElementTypeInfo(), false);
+                    result = LazyObjectInspectorFactory.getLazyListObjectInspector(elementObjectInspector);
+                    break;
+                }
+                case MAP: {
+                    MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
+                    ObjectInspector keyObjectInspector = getLazyObjectInspectorFromTypeInfo(
+                            mapTypeInfo.getMapKeyTypeInfo(), false);
+                    ObjectInspector valueObjectInspector = getLazyObjectInspectorFromTypeInfo(
+                            mapTypeInfo.getMapValueTypeInfo(), false);
+                    result = LazyObjectInspectorFactory.getLazyMapObjectInspector(keyObjectInspector,
+                            valueObjectInspector);
+                    break;
+                }
+                case STRUCT: {
+                    StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
+                    List<String> fieldNames = structTypeInfo.getAllStructFieldNames();
+                    List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
+                    List<ObjectInspector> fieldObjectInspectors = new ArrayList<ObjectInspector>(fieldTypeInfos.size());
+
+                    for (int i = 0; i < fieldTypeInfos.size(); i++) {
+                        fieldObjectInspectors.add(getLazyObjectInspectorFromTypeInfo(fieldTypeInfos.get(i), false));
+                    }
+
+                    // if it is top level then create columnar
+                    if (topLevel)
+                        result = LazyObjectInspectorFactory.getLazyColumnarObjectInspector(fieldNames,
+                                fieldObjectInspectors);
+                    // if it is not top level then create struct
+                    else
+                        result = LazyObjectInspectorFactory.getLazyStructObjectInspector(fieldNames,
+                                fieldObjectInspectors);
+
+                    break;
+                }
+                default: {
+                    // a null result would make the ConcurrentHashMap.put below
+                    // throw a NullPointerException, so fail explicitly instead
+                    throw new RuntimeException("Unrecognized type category: " + typeInfo.getCategory());
+                }
+            }
+            cachedLazyObjectInspector.put(typeInfo, result);
+        }
+        return result;
+    }
+
+    /**
+     * Get the top-level (columnar) lazy object inspector.
+     * 
+     * @param fieldNames
+     *            the top-level field names
+     * @param fieldTypeInfos
+     *            the type infos of the top-level fields
+     * @return a lazy columnar object inspector over the given fields
+     */
+    public static ObjectInspector getLazyObjectInspector(List<String> fieldNames, List<TypeInfo> fieldTypeInfos) {
+        List<ObjectInspector> fieldObjectInspectors = new ArrayList<ObjectInspector>(fieldTypeInfos.size());
+        for (int i = 0; i < fieldTypeInfos.size(); i++) {
+            fieldObjectInspectors.add(getLazyObjectInspectorFromTypeInfo(fieldTypeInfos.get(i), false));
+        }
+
+        return LazyObjectInspectorFactory.getLazyColumnarObjectInspector(fieldNames, fieldObjectInspectors);
+    }
+
+    private LazyUtils() {
+        // prevent instantiation
+    }
+
+    /**
+     * Returns -1 if the first byte sequence is lexicographically less than the
+     * second; returns +1 if the second byte sequence is lexicographically less
+     * than the first; otherwise return 0.
+     */
+    public static int compare(byte[] b1, int start1, int length1, byte[] b2, int start2, int length2) {
+
+        int min = Math.min(length1, length2);
+
+        for (int i = 0; i < min; i++) {
+            if (b1[start1 + i] == b2[start2 + i]) {
+                continue;
+            }
+            if (b1[start1 + i] < b2[start2 + i]) {
+                return -1;
+            } else {
+                return 1;
+            }
+        }
+
+        if (length1 < length2) {
+            return -1;
+        }
+        if (length1 > length2) {
+            return 1;
+        }
+        return 0;
+    }
+
+    public static int hashBytes(byte[] data, int start, int len) {
+        int hash = 1;
+        for (int i = start; i < len; i++) {
+            hash = (31 * hash) + data[i];
+        }
+        return hash;
+    }
+
+    /**
+     * Writes a zero-compressed encoded int to a byte array.
+     * 
+     * @param byteStream
+     *            the byte array/stream
+     * @param i
+     *            the int
+     */
+    public static void writeVInt(DataOutput byteStream, int i) throws IOException {
+        writeVLong(byteStream, i);
+    }
+
+    /**
+     * Write a zero-compressed encoded long to a byte array.
+     * 
+     * @param byteStream
+     *            the byte array/stream
+     * @param l
+     *            the long
+     */
+    public static void writeVLong(DataOutput byteStream, long l) throws IOException {
+        if (l >= -112 && l <= 127) {
+            byteStream.write((byte) l);
+            return;
+        }
+
+        int len = -112;
+        if (l < 0) {
+            l ^= -1L; // take one's complement
+            len = -120;
+        }
+
+        long tmp = l;
+        while (tmp != 0) {
+            tmp = tmp >> 8;
+            len--;
+        }
+
+        byteStream.write((byte) len);
+
+        len = (len < -120) ? -(len + 120) : -(len + 112);
+
+        for (int idx = len; idx != 0; idx--) {
+            int shiftbits = (idx - 1) * 8;
+            long mask = 0xFFL << shiftbits;
+            byteStream.write((byte) ((l & mask) >> shiftbits));
+        }
+    }
+}
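
Aside: writeVLong/readVLong above implement Hadoop's zero-compressed encoding - values in [-112, 127] take one byte, everything else takes a length-marker byte followed by the magnitude bytes. A round-trip sketch using the org.apache.hadoop.io.WritableUtils equivalents that these methods mirror:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.WritableUtils;

    public class VLongRoundTrip {
        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bos);
            WritableUtils.writeVLong(out, 300L);  // marker byte + two magnitude bytes
            WritableUtils.writeVLong(out, -5L);   // fits in a single byte
            byte[] bytes = bos.toByteArray();
            System.out.println(bytes.length);     // 4

            DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes));
            System.out.println(WritableUtils.readVLong(in)); // 300
            System.out.println(WritableUtils.readVLong(in)); // -5
        }
    }
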
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyColumnarObjectInspector.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyColumnarObjectInspector.java
new file mode 100644
index 0000000..b1ca622
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyColumnarObjectInspector.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyColumnar;
+
+/**
+ * ObjectInspector for LazyColumnar.
+ * 
+ * @see LazyColumnar
+ */
+public class LazyColumnarObjectInspector extends StandardStructObjectInspector implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    public LazyColumnarObjectInspector(List<String> structFieldNames, List<ObjectInspector> structFieldObjectInspectors) {
+        super(structFieldNames, structFieldObjectInspectors);
+    }
+
+    public LazyColumnarObjectInspector(List<StructField> fields) {
+        super(fields);
+    }
+
+    @Override
+    public Object getStructFieldData(Object data, StructField fieldRef) {
+        if (data == null) {
+            return null;
+        }
+        LazyColumnar struct = (LazyColumnar) data;
+        MyField f = (MyField) fieldRef;
+
+        int fieldID = f.getFieldID();
+        assert (fieldID >= 0 && fieldID < fields.size());
+
+        Object column = struct.getField(fieldID);
+        return column;
+    }
+
+    @Override
+    public List<Object> getStructFieldsDataAsList(Object data) {
+        if (data == null) {
+            return null;
+        }
+        LazyColumnar struct = (LazyColumnar) data;
+        return struct.getFieldsAsList();
+    }
+
+    @Override
+    public String toString() {
+        StringBuilder str = new StringBuilder();
+        for (MyField f : fields) {
+            str.append(f.getFieldName()).append(":").append(f.getFieldObjectInspector().getTypeName()).append("  ");
+        }
+        return str.toString();
+    }
+}
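
Aside: callers typically reach LazyColumnarObjectInspector only through the generic StructObjectInspector contract. A usage sketch of that contract with Hive's standard (non-lazy) factory, purely to illustrate the access pattern:

    import java.util.Arrays;

    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.objectinspector.StructField;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class StructAccessSketch {
        public static void main(String[] args) {
            StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(
                    Arrays.asList("id", "name"),
                    Arrays.<ObjectInspector> asList(
                            PrimitiveObjectInspectorFactory.javaIntObjectInspector,
                            PrimitiveObjectInspectorFactory.javaStringObjectInspector));
            Object row = Arrays.<Object> asList(42, "hive");
            StructField idField = soi.getStructFieldRef("id");
            System.out.println(soi.getStructFieldData(row, idField)); // 42
        }
    }
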
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyListObjectInspector.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyListObjectInspector.java
new file mode 100644
index 0000000..aaa5d66
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyListObjectInspector.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyArray;
+
+/**
+ * ObjectInspector for LazyList.
+ */
+public class LazyListObjectInspector extends StandardListObjectInspector {
+
+    protected LazyListObjectInspector(ObjectInspector listElementObjectInspector) {
+        super(listElementObjectInspector);
+    }
+
+    @Override
+    public List<?> getList(Object data) {
+        if (data == null) {
+            return null;
+        }
+        LazyArray array = (LazyArray) data;
+        return array.getList();
+    }
+
+    @Override
+    public Object getListElement(Object data, int index) {
+        if (data == null) {
+            return null;
+        }
+        LazyArray array = (LazyArray) data;
+        return array.getListElementObject(index);
+    }
+
+    @Override
+    public int getListLength(Object data) {
+        if (data == null) {
+            return -1;
+        }
+        LazyArray array = (LazyArray) data;
+        return array.getListLength();
+    }
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyMapObjectInspector.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyMapObjectInspector.java
new file mode 100644
index 0000000..1b0c412
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyMapObjectInspector.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StandardMapObjectInspector;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyMap;
+
+/**
+ * ObjectInspector for LazyMap.
+ * 
+ * @see LazyMap
+ */
+public class LazyMapObjectInspector extends StandardMapObjectInspector {
+
+    protected LazyMapObjectInspector(ObjectInspector mapKeyObjectInspector, ObjectInspector mapValueObjectInspector) {
+        super(mapKeyObjectInspector, mapValueObjectInspector);
+    }
+
+    @Override
+    public Map<?, ?> getMap(Object data) {
+        if (data == null) {
+            return null;
+        }
+        return ((LazyMap) data).getMap();
+    }
+
+    @Override
+    public int getMapSize(Object data) {
+        if (data == null) {
+            return -1;
+        }
+        return ((LazyMap) data).getMapSize();
+    }
+
+    @Override
+    public Object getMapValueElement(Object data, Object key) {
+        if (data == null) {
+            return null;
+        }
+        return ((LazyMap) data).getMapValueElement(key);
+    }
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyObjectInspectorFactory.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyObjectInspectorFactory.java
new file mode 100644
index 0000000..8093c94
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyObjectInspectorFactory.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+/**
+ * ObjectInspectorFactory is the primary way to create new ObjectInspector
+ * instances.
+ * SerDe classes should call the static functions in this library to create an
+ * ObjectInspector to return to the caller of SerDe2.getObjectInspector().
+ * The reason for having caches here is that ObjectInspectors do not have
+ * internal state - so ObjectInspectors with the same construction parameters
+ * should result in exactly the same ObjectInspector.
+ */
+
+public final class LazyObjectInspectorFactory {
+
+    static ConcurrentHashMap<ArrayList<Object>, LazyColumnarObjectInspector> cachedLazyColumnarObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyColumnarObjectInspector>();
+
+    static ConcurrentHashMap<ArrayList<Object>, LazyStructObjectInspector> cachedLazyStructObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyStructObjectInspector>();
+
+    static ConcurrentHashMap<ArrayList<Object>, LazyListObjectInspector> cachedLazyListObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyListObjectInspector>();
+
+    static ConcurrentHashMap<ArrayList<Object>, LazyMapObjectInspector> cachedLazyMapObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyMapObjectInspector>();
+
+    public static LazyColumnarObjectInspector getLazyColumnarObjectInspector(List<String> structFieldNames,
+            List<ObjectInspector> structFieldObjectInspectors) {
+        ArrayList<Object> signature = new ArrayList<Object>();
+        signature.add(structFieldNames);
+        signature.add(structFieldObjectInspectors);
+        LazyColumnarObjectInspector result = cachedLazyColumnarObjectInspector.get(signature);
+        if (result == null) {
+            result = new LazyColumnarObjectInspector(structFieldNames, structFieldObjectInspectors);
+            cachedLazyColumnarObjectInspector.put(signature, result);
+        }
+        return result;
+    }
+
+    public static LazyStructObjectInspector getLazyStructObjectInspector(List<String> structFieldNames,
+            List<ObjectInspector> structFieldObjectInspectors) {
+        ArrayList<Object> signature = new ArrayList<Object>();
+        signature.add(structFieldNames);
+        signature.add(structFieldObjectInspectors);
+        LazyStructObjectInspector result = cachedLazyStructObjectInspector.get(signature);
+        if (result == null) {
+            result = new LazyStructObjectInspector(structFieldNames, structFieldObjectInspectors);
+            cachedLazyStructObjectInspector.put(signature, result);
+        }
+        return result;
+    }
+
+    public static LazyListObjectInspector getLazyListObjectInspector(ObjectInspector listElementInspector) {
+        ArrayList<Object> signature = new ArrayList<Object>();
+        signature.add(listElementInspector);
+        LazyListObjectInspector result = cachedLazyListObjectInspector.get(signature);
+        if (result == null) {
+            result = new LazyListObjectInspector(listElementInspector);
+            cachedLazyListObjectInspector.put(signature, result);
+        }
+        return result;
+    }
+
+    public static LazyMapObjectInspector getLazyMapObjectInspector(ObjectInspector keyInspector,
+            ObjectInspector valueInspector) {
+        ArrayList<Object> signature = new ArrayList<Object>();
+        signature.add(keyInspector);
+        signature.add(valueInspector);
+        LazyMapObjectInspector result = cachedLazyMapObjectInspector.get(signature);
+        if (result == null) {
+            result = new LazyMapObjectInspector(keyInspector, valueInspector);
+            cachedLazyMapObjectInspector.put(signature, result);
+        }
+        return result;
+    }
+
+    private LazyObjectInspectorFactory() {
+        // prevent instantiation
+    }
+}
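
Aside: the caches above key on ArrayList&lt;Object&gt; signatures, relying on ArrayList's element-wise equals/hashCode so that equal (not merely identical) parameter lists map to the same inspector. A small sketch of that keying behavior:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.concurrent.ConcurrentHashMap;

    public class SignatureCacheSketch {
        public static void main(String[] args) {
            ConcurrentHashMap<ArrayList<Object>, String> cache =
                    new ConcurrentHashMap<ArrayList<Object>, String>();
            ArrayList<Object> sig1 = new ArrayList<Object>(Arrays.<Object> asList("a", "b"));
            ArrayList<Object> sig2 = new ArrayList<Object>(Arrays.<Object> asList("a", "b"));
            cache.put(sig1, "inspector-for-a-b");
            // sig2 is a different object but an equal key, so the lookup hits:
            System.out.println(cache.get(sig2)); // prints inspector-for-a-b
        }
    }
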
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyStructObjectInspector.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyStructObjectInspector.java
new file mode 100644
index 0000000..ad70d4c
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/LazyStructObjectInspector.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyStruct;
+
+/**
+ * ObjectInspector for LazyStruct.
+ * 
+ * @see LazyStruct
+ */
+public class LazyStructObjectInspector extends StandardStructObjectInspector {
+
+    protected LazyStructObjectInspector(List<String> structFieldNames, List<ObjectInspector> structFieldObjectInspectors) {
+        super(structFieldNames, structFieldObjectInspectors);
+    }
+
+    protected LazyStructObjectInspector(List<StructField> fields) {
+        super(fields);
+    }
+
+    @Override
+    public Object getStructFieldData(Object data, StructField fieldRef) {
+        if (data == null) {
+            return null;
+        }
+        LazyStruct struct = (LazyStruct) data;
+        MyField f = (MyField) fieldRef;
+
+        int fieldID = f.getFieldID();
+        assert (fieldID >= 0 && fieldID < fields.size());
+
+        return struct.getField(fieldID);
+    }
+
+    @Override
+    public List<Object> getStructFieldsDataAsList(Object data) {
+        if (data == null) {
+            return null;
+        }
+        LazyStruct struct = (LazyStruct) data;
+        return struct.getFieldsAsList();
+    }
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java
new file mode 100644
index 0000000..eaa2bbc
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/AbstractPrimitiveLazyObjectInspector.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyPrimitive;
+
+/**
+ * An AbstractPrimitiveLazyObjectInspector for a LazyPrimitive object.
+ */
+public abstract class AbstractPrimitiveLazyObjectInspector<T extends Writable> extends AbstractPrimitiveObjectInspector {
+
+    protected AbstractPrimitiveLazyObjectInspector(PrimitiveTypeEntry typeEntry) {
+        super(typeEntry);
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public T getPrimitiveWritableObject(Object o) {
+        return o == null ? null : ((LazyPrimitive<?, T>) o).getWritableObject();
+    }
+
+    @Override
+    public boolean preferWritable() {
+        return true;
+    }
+
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyBooleanObjectInspector.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyBooleanObjectInspector.java
new file mode 100644
index 0000000..7927c1e
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyBooleanObjectInspector.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.io.BooleanWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyBoolean;
+
+/**
+ * A LazyBooleanObjectInspector inspects a LazyBoolean object.
+ */
+public class LazyBooleanObjectInspector extends AbstractPrimitiveLazyObjectInspector<BooleanWritable> implements
+        BooleanObjectInspector {
+
+    LazyBooleanObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.booleanTypeEntry);
+    }
+
+    @Override
+    public boolean get(Object o) {
+        return getPrimitiveWritableObject(o).get();
+    }
+
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyBoolean((LazyBoolean) o);
+    }
+
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        return o == null ? null : Boolean.valueOf(get(o));
+    }
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyByteObjectInspector.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyByteObjectInspector.java
new file mode 100644
index 0000000..10a881c
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyByteObjectInspector.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.io.ByteWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyByte;
+
+/**
+ * A LazyByteObjectInspector inspects a LazyByte object.
+ */
+public class LazyByteObjectInspector extends AbstractPrimitiveLazyObjectInspector<ByteWritable> implements
+        ByteObjectInspector {
+
+    LazyByteObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.byteTypeEntry);
+    }
+
+    @Override
+    public byte get(Object o) {
+        return getPrimitiveWritableObject(o).get();
+    }
+
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyByte((LazyByte) o);
+    }
+
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        return o == null ? null : Byte.valueOf(get(o));
+    }
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyDoubleObjectInspector.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyDoubleObjectInspector.java
new file mode 100644
index 0000000..9f98b56
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyDoubleObjectInspector.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.io.DoubleWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyDouble;
+
+/**
+ * A WritableDoubleObjectInspector inspects a DoubleWritable Object.
+ */
+public class LazyDoubleObjectInspector extends AbstractPrimitiveLazyObjectInspector<DoubleWritable> implements
+        DoubleObjectInspector {
+
+    LazyDoubleObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.doubleTypeEntry);
+    }
+
+    @Override
+    public double get(Object o) {
+        return getPrimitiveWritableObject(o).get();
+    }
+
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyDouble((LazyDouble) o);
+    }
+
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        return o == null ? null : Double.valueOf(get(o));
+    }
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyFloatObjectInspector.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyFloatObjectInspector.java
new file mode 100644
index 0000000..bf3e9a2
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyFloatObjectInspector.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.io.FloatWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyFloat;
+
+/**
+ * A WritableFloatObjectInspector inspects a FloatWritable Object.
+ */
+public class LazyFloatObjectInspector extends AbstractPrimitiveLazyObjectInspector<FloatWritable> implements
+        FloatObjectInspector {
+
+    LazyFloatObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.floatTypeEntry);
+    }
+
+    @Override
+    public float get(Object o) {
+        return getPrimitiveWritableObject(o).get();
+    }
+
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyFloat((LazyFloat) o);
+    }
+
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        return o == null ? null : Float.valueOf(get(o));
+    }
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyIntObjectInspector.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyIntObjectInspector.java
new file mode 100644
index 0000000..87bcb0d
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyIntObjectInspector.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.io.IntWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyInteger;
+
+/**
+ * A WritableIntObjectInspector inspects an IntWritable Object.
+ */
+public class LazyIntObjectInspector extends AbstractPrimitiveLazyObjectInspector<IntWritable> implements
+        IntObjectInspector {
+
+    LazyIntObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.intTypeEntry);
+    }
+
+    @Override
+    public int get(Object o) {
+        return getPrimitiveWritableObject(o).get();
+    }
+
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyInteger((LazyInteger) o);
+    }
+
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        return o == null ? null : Integer.valueOf(get(o));
+    }
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyLongObjectInspector.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyLongObjectInspector.java
new file mode 100644
index 0000000..06b5d3c
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyLongObjectInspector.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.io.LongWritable;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyLong;
+
+/**
+ * A WritableLongObjectInspector inspects a LongWritable Object.
+ */
+public class LazyLongObjectInspector extends AbstractPrimitiveLazyObjectInspector<LongWritable> implements
+        LongObjectInspector {
+
+    LazyLongObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.longTypeEntry);
+    }
+
+    @Override
+    public long get(Object o) {
+        return getPrimitiveWritableObject(o).get();
+    }
+
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyLong((LazyLong) o);
+    }
+
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        return o == null ? null : Long.valueOf(get(o));
+    }
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
new file mode 100644
index 0000000..5d7ef48
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import java.util.ArrayList;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+
+/**
+ * LazyPrimitiveObjectInspectorFactory is the primary way to create new
+ * ObjectInspector instances.
+ * SerDe classes should call the static functions in this library to create an
+ * ObjectInspector to return to the caller of SerDe2.getObjectInspector().
+ * The reason for having caches here is that
+ * ObjectInspectors do not have an internal state - so ObjectInspectors with the
+ * same construction parameters should result in exactly the same
+ * ObjectInspector.
+ */
+public final class LazyPrimitiveObjectInspectorFactory {
+
+    public static final LazyBooleanObjectInspector LAZY_BOOLEAN_OBJECT_INSPECTOR = new LazyBooleanObjectInspector();
+    public static final LazyByteObjectInspector LAZY_BYTE_OBJECT_INSPECTOR = new LazyByteObjectInspector();
+    public static final LazyShortObjectInspector LAZY_SHORT_OBJECT_INSPECTOR = new LazyShortObjectInspector();
+    public static final LazyIntObjectInspector LAZY_INT_OBJECT_INSPECTOR = new LazyIntObjectInspector();
+    public static final LazyLongObjectInspector LAZY_LONG_OBJECT_INSPECTOR = new LazyLongObjectInspector();
+    public static final LazyFloatObjectInspector LAZY_FLOAT_OBJECT_INSPECTOR = new LazyFloatObjectInspector();
+    public static final LazyDoubleObjectInspector LAZY_DOUBLE_OBJECT_INSPECTOR = new LazyDoubleObjectInspector();
+    public static final LazyVoidObjectInspector LAZY_VOID_OBJECT_INSPECTOR = new LazyVoidObjectInspector();
+
+    static ConcurrentHashMap<ArrayList<Object>, LazyStringObjectInspector> cachedLazyStringObjectInspector = new ConcurrentHashMap<ArrayList<Object>, LazyStringObjectInspector>();
+
+    public static LazyStringObjectInspector getLazyStringObjectInspector(boolean escaped, byte escapeChar) {
+        ArrayList<Object> signature = new ArrayList<Object>();
+        signature.add(Boolean.valueOf(escaped));
+        signature.add(Byte.valueOf(escapeChar));
+        LazyStringObjectInspector result = cachedLazyStringObjectInspector.get(signature);
+        if (result == null) {
+            result = new LazyStringObjectInspector(escaped, escapeChar);
+            cachedLazyStringObjectInspector.put(signature, result);
+        }
+        return result;
+    }
+
+    public static AbstractPrimitiveLazyObjectInspector<?> getLazyObjectInspector(PrimitiveCategory primitiveCategory,
+            boolean escaped, byte escapeChar) {
+
+        switch (primitiveCategory) {
+            case BOOLEAN:
+                return LAZY_BOOLEAN_OBJECT_INSPECTOR;
+            case BYTE:
+                return LAZY_BYTE_OBJECT_INSPECTOR;
+            case SHORT:
+                return LAZY_SHORT_OBJECT_INSPECTOR;
+            case INT:
+                return LAZY_INT_OBJECT_INSPECTOR;
+            case LONG:
+                return LAZY_LONG_OBJECT_INSPECTOR;
+            case FLOAT:
+                return LAZY_FLOAT_OBJECT_INSPECTOR;
+            case DOUBLE:
+                return LAZY_DOUBLE_OBJECT_INSPECTOR;
+            case STRING:
+                return getLazyStringObjectInspector(escaped, escapeChar);
+            case VOID:
+                return LAZY_VOID_OBJECT_INSPECTOR;
+            default:
+                throw new RuntimeException("Internal error: Cannot find ObjectInspector for " + primitiveCategory);
+        }
+    }
+
+    private LazyPrimitiveObjectInspectorFactory() {
+        // prevent instantiation
+    }
+
+}
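
Because the inspectors are stateless, the factory above can hand out one shared instance per primitive category and cache string inspectors per (escaped, escapeChar) signature. A usage sketch under those assumptions (hive-serde 0.7-era API, as used throughout this patch):

    import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;

    import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.AbstractPrimitiveLazyObjectInspector;
    import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;
    import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.LazyStringObjectInspector;

    public class LazyFactoryDemo {
        public static void main(String[] args) {
            // Non-string categories ignore the escaping arguments and return singletons.
            AbstractPrimitiveLazyObjectInspector<?> intOi = LazyPrimitiveObjectInspectorFactory
                    .getLazyObjectInspector(PrimitiveCategory.INT, false, (byte) 0);
            System.out.println(intOi == LazyPrimitiveObjectInspectorFactory.LAZY_INT_OBJECT_INSPECTOR); // true
            // String inspectors are cached by (escaped, escapeChar), so equal
            // parameters yield the same instance across calls.
            LazyStringObjectInspector s1 = LazyPrimitiveObjectInspectorFactory
                    .getLazyStringObjectInspector(true, (byte) '\\');
            LazyStringObjectInspector s2 = LazyPrimitiveObjectInspectorFactory
                    .getLazyStringObjectInspector(true, (byte) '\\');
            System.out.println(s1 == s2); // true
        }
    }
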
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyShortObjectInspector.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyShortObjectInspector.java
new file mode 100644
index 0000000..b02d9bc
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyShortObjectInspector.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyShort;
+
+/**
+ * A WritableShortObjectInspector inspects a ShortWritable Object.
+ */
+public class LazyShortObjectInspector extends AbstractPrimitiveLazyObjectInspector<ShortWritable> implements
+        ShortObjectInspector {
+
+    LazyShortObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.shortTypeEntry);
+    }
+
+    @Override
+    public short get(Object o) {
+        return getPrimitiveWritableObject(o).get();
+    }
+
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyShort((LazyShort) o);
+    }
+
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        return o == null ? null : Short.valueOf(get(o));
+    }
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyStringObjectInspector.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyStringObjectInspector.java
new file mode 100644
index 0000000..4d649dc
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyStringObjectInspector.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.io.Text;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyString;
+
+/**
+ * A WritableStringObjectInspector inspects a Text Object.
+ */
+public class LazyStringObjectInspector extends AbstractPrimitiveLazyObjectInspector<Text> implements
+        StringObjectInspector {
+
+    boolean escaped;
+    byte escapeChar;
+
+    LazyStringObjectInspector(boolean escaped, byte escapeChar) {
+        super(PrimitiveObjectInspectorUtils.stringTypeEntry);
+        this.escaped = escaped;
+        this.escapeChar = escapeChar;
+    }
+
+    @Override
+    public Object copyObject(Object o) {
+        return o == null ? null : new LazyString((LazyString) o);
+    }
+
+    @Override
+    public Text getPrimitiveWritableObject(Object o) {
+        return o == null ? null : ((LazyString) o).getWritableObject();
+    }
+
+    @Override
+    public String getPrimitiveJavaObject(Object o) {
+        return o == null ? null : ((LazyString) o).getWritableObject().toString();
+    }
+
+    public boolean isEscaped() {
+        return escaped;
+    }
+
+    public byte getEscapeChar() {
+        return escapeChar;
+    }
+
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyVoidObjectInspector.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyVoidObjectInspector.java
new file mode 100644
index 0000000..c916191
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/LazyVoidObjectInspector.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector;
+import org.apache.hadoop.io.NullWritable;
+
+/**
+ * A WritableVoidObjectInspector inspects a NullWritable Object.
+ */
+public class LazyVoidObjectInspector extends AbstractPrimitiveLazyObjectInspector<NullWritable> implements
+        VoidObjectInspector {
+
+    LazyVoidObjectInspector() {
+        super(PrimitiveObjectInspectorUtils.voidTypeEntry);
+    }
+
+    @Override
+    public Object copyObject(Object o) {
+        return o;
+    }
+
+    @Override
+    public Object getPrimitiveJavaObject(Object o) {
+        throw new RuntimeException("Internal error: cannot create Void object.");
+    }
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/PrimitiveObjectInspectorFactory.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
new file mode 100644
index 0000000..33f0e51
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
+
+import java.util.HashMap;
+
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+
+/**
+ * PrimitiveObjectInspectorFactory is the primary way to create new
+ * PrimitiveObjectInspector instances.
+ * The reason for having caches here is that
+ * ObjectInspectors do not have an internal state - so ObjectInspectors with the
+ * same construction parameters should result in exactly the same
+ * ObjectInspector.
+ */
+public final class PrimitiveObjectInspectorFactory {
+
+    public static final LazyBooleanObjectInspector LazyBooleanObjectInspector = new LazyBooleanObjectInspector();
+    public static final LazyByteObjectInspector LazyByteObjectInspector = new LazyByteObjectInspector();
+    public static final LazyShortObjectInspector LazyShortObjectInspector = new LazyShortObjectInspector();
+    public static final LazyIntObjectInspector LazyIntObjectInspector = new LazyIntObjectInspector();
+    public static final LazyLongObjectInspector LazyLongObjectInspector = new LazyLongObjectInspector();
+    public static final LazyFloatObjectInspector LazyFloatObjectInspector = new LazyFloatObjectInspector();
+    public static final LazyDoubleObjectInspector LazyDoubleObjectInspector = new LazyDoubleObjectInspector();
+    public static final LazyStringObjectInspector LazyStringObjectInspector = new LazyStringObjectInspector(false,
+            (byte) '\\');
+    public static final LazyVoidObjectInspector LazyVoidObjectInspector = new LazyVoidObjectInspector();
+
+    private static HashMap<PrimitiveCategory, AbstractPrimitiveLazyObjectInspector<?>> cachedPrimitiveLazyInspectorCache = new HashMap<PrimitiveCategory, AbstractPrimitiveLazyObjectInspector<?>>();
+
+    static {
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.BOOLEAN, LazyBooleanObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.BYTE, LazyByteObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.SHORT, LazyShortObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.INT, LazyIntObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.LONG, LazyLongObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.FLOAT, LazyFloatObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.DOUBLE, LazyDoubleObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.STRING, LazyStringObjectInspector);
+        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.VOID, LazyVoidObjectInspector);
+    }
+
+    /**
+     * Returns the cached primitive lazy ObjectInspector for the PrimitiveCategory.
+     *
+     * @param primitiveCategory
+     */
+    public static AbstractPrimitiveLazyObjectInspector<?> getPrimitiveLazyObjectInspector(
+            PrimitiveCategory primitiveCategory) {
+        AbstractPrimitiveLazyObjectInspector<?> result = cachedPrimitiveLazyInspectorCache.get(primitiveCategory);
+        if (result == null) {
+            throw new RuntimeException("Internal error: Cannot find ObjectInspector for " + primitiveCategory);
+        }
+        return result;
+    }
+
+    private PrimitiveObjectInspectorFactory() {
+        // prevent instantiation
+    }
+}
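
PrimitiveObjectInspectorFactory is the non-parameterized counterpart of the lazy factory earlier in this patch: a static initializer pre-populates a HashMap keyed by PrimitiveCategory, and the map is never written again, so the plain HashMap is safe for concurrent readers. A hedged lookup sketch:

    import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;

    import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.AbstractPrimitiveLazyObjectInspector;
    import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class CategoryLookupDemo {
        public static void main(String[] args) {
            // Known categories resolve from the pre-built map; unknown ones throw.
            AbstractPrimitiveLazyObjectInspector<?> oi = PrimitiveObjectInspectorFactory
                    .getPrimitiveLazyObjectInspector(PrimitiveCategory.DOUBLE);
            System.out.println(oi.getClass().getSimpleName()); // LazyDoubleObjectInspector
        }
    }
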
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/parser/IHiveParser.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/parser/IHiveParser.java
new file mode 100644
index 0000000..7830c52
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/parser/IHiveParser.java
@@ -0,0 +1,16 @@
+package edu.uci.ics.hivesterix.serde.parser;
+
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+
+public interface IHiveParser {
+    /**
+     * parse one hive row into a binary tuple
+     * @param data
+     * @param start
+     * @param length
+     * @param tb
+     */
+    public void parse(byte[] data, int start, int length, ArrayTupleBuilder tb) throws IOException;
+}
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/parser/TextToBinaryTupleParser.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/parser/TextToBinaryTupleParser.java
new file mode 100644
index 0000000..38e1b36
--- /dev/null
+++ b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/parser/TextToBinaryTupleParser.java
@@ -0,0 +1,174 @@
+package edu.uci.ics.hivesterix.serde.parser;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
+import org.apache.hadoop.hive.serde2.lazy.LazyLong;
+import org.apache.hadoop.hive.serde2.lazy.LazyShort;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.io.Text;
+
+import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+
+public class TextToBinaryTupleParser implements IHiveParser {
+    private int[] invertedIndex;
+    private int[] fieldEnds;
+    private int lastNecessaryFieldIndex;
+    private LazySimpleStructObjectInspector inputObjectInspector;
+    private List<? extends StructField> fieldRefs;
+
+    public TextToBinaryTupleParser(int[] outputColumnsOffset, ObjectInspector structInspector) {
+        int size = 0;
+        for (int i = 0; i < outputColumnsOffset.length; i++)
+            if (outputColumnsOffset[i] >= 0)
+                size++;
+        invertedIndex = new int[size];
+        for (int i = 0; i < outputColumnsOffset.length; i++)
+            if (outputColumnsOffset[i] >= 0) {
+                invertedIndex[outputColumnsOffset[i]] = i;
+                lastNecessaryFieldIndex = i;
+            }
+        fieldEnds = new int[outputColumnsOffset.length];
+        for (int i = 0; i < fieldEnds.length; i++)
+            fieldEnds[i] = 0;
+        inputObjectInspector = (LazySimpleStructObjectInspector) structInspector;
+        fieldRefs = inputObjectInspector.getAllStructFieldRefs();
+    }
+
+    @Override
+    public void parse(byte[] bytes, int start, int length, ArrayTupleBuilder tb) throws IOException {
+        byte separator = inputObjectInspector.getSeparator();
+        boolean lastColumnTakesRest = inputObjectInspector.getLastColumnTakesRest();
+        boolean isEscaped = inputObjectInspector.isEscaped();
+        byte escapeChar = inputObjectInspector.getEscapeChar();
+        DataOutput output = tb.getDataOutput();
+
+        int structByteEnd = start + length - 1;
+        int fieldId = 0;
+        int fieldByteEnd = start;
+
+        // Go through all bytes in the byte[]
+        while (fieldByteEnd <= structByteEnd && fieldId <= lastNecessaryFieldIndex) {
+            if (fieldByteEnd == structByteEnd || bytes[fieldByteEnd] == separator) {
+                // Reached the end of a field?
+                if (lastColumnTakesRest && fieldId == fieldEnds.length - 1) {
+                    fieldByteEnd = structByteEnd;
+                }
+                fieldEnds[fieldId] = fieldByteEnd;
+                if (fieldId == fieldEnds.length - 1 || fieldByteEnd == structByteEnd) {
+                    // for the case of null fields
+                    for (int i = fieldId; i < fieldEnds.length; i++) {
+                        fieldEnds[i] = fieldByteEnd;
+                    }
+                    break;
+                }
+                fieldByteEnd++;
+                fieldId++;
+            } else {
+                if (isEscaped && bytes[fieldByteEnd] == escapeChar && fieldByteEnd + 1 < structByteEnd) {
+                    // ignore the char after escape_char
+                    fieldByteEnd += 2;
+                } else {
+                    fieldByteEnd++;
+                }
+            }
+        }
+
+        for (int i = 0; i < invertedIndex.length; i++) {
+            int index = invertedIndex[i];
+            StructField fieldRef = fieldRefs.get(index);
+            ObjectInspector inspector = fieldRef.getFieldObjectInspector();
+            Category category = inspector.getCategory();
+            int fieldStart = index == 0 ? 0 : fieldEnds[index - 1] + 1;
+            int fieldEnd = fieldEnds[index];
+            if (bytes[fieldEnd] == separator)
+                fieldEnd--;
+            int fieldLen = fieldEnd - fieldStart + 1;
+            switch (category) {
+                case PRIMITIVE:
+                    PrimitiveObjectInspector poi = (PrimitiveObjectInspector) inspector;
+                    switch (poi.getPrimitiveCategory()) {
+                        case VOID: {
+                            break;
+                        }
+                        case BOOLEAN: {
+                            output.write(bytes[fieldStart]);
+                            break;
+                        }
+                        case BYTE: {
+                            output.write(bytes[fieldStart]);
+                            break;
+                        }
+                        case SHORT: {
+                            short v = LazyShort.parseShort(bytes, fieldStart, fieldLen);
+                            output.write((byte) (v >> 8));
+                            output.write((byte) (v));
+                            break;
+                        }
+                        case INT: {
+                            int v = LazyInteger.parseInt(bytes, fieldStart, fieldLen);
+                            LazyUtils.writeVInt(output, v);
+                            break;
+                        }
+                        case LONG: {
+                            long v = LazyLong.parseLong(bytes, fieldStart, fieldLen);
+                            LazyUtils.writeVLong(output, v);
+                            break;
+                        }
+                        case FLOAT: {
+                            float value = Float.parseFloat(Text.decode(bytes, fieldStart, fieldLen));
+                            int v = Float.floatToIntBits(value);
+                            output.write((byte) (v >> 24));
+                            output.write((byte) (v >> 16));
+                            output.write((byte) (v >> 8));
+                            output.write((byte) (v));
+                            break;
+                        }
+                        case DOUBLE: {
+                            double value = Double.parseDouble(Text.decode(bytes, fieldStart, fieldLen));
+                            long v = Double.doubleToLongBits(value);
+                            output.write((byte) (v >> 56));
+                            output.write((byte) (v >> 48));
+                            output.write((byte) (v >> 40));
+                            output.write((byte) (v >> 32));
+                            output.write((byte) (v >> 24));
+                            output.write((byte) (v >> 16));
+                            output.write((byte) (v >> 8));
+                            output.write((byte) (v));
+                            break;
+                        }
+                        case STRING: {
+                            LazyUtils.writeVInt(output, fieldLen);
+                            output.write(bytes, fieldStart, fieldLen);
+                            break;
+                        }
+                        default: {
+                            throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory());
+                        }
+                    }
+                    break;
+                case STRUCT:
+                    throw new NotImplementedException("Unrecognized type: struct ");
+                case LIST:
+                    throw new NotImplementedException("Unrecognized type: list ");
+                case MAP:
+                    throw new NotImplementedException("Unrecognized type: map ");
+                case UNION:
+                    throw new NotImplementedException("Unrecognized type: union ");
+            }
+            tb.addFieldEndOffset();
+        }
+    }
+}
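
TextToBinaryTupleParser converts one delimited text row straight into Hyracks binary fields: it first scans for field boundaries (honoring escape characters and lastColumnTakesRest), then materializes only the projected columns via invertedIndex. A wiring sketch, assuming rowInspector is the table's LazySimpleStructObjectInspector and that outputColumnsOffset[i] gives the output slot of source column i (-1 drops the column):

    import java.io.IOException;

    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

    import edu.uci.ics.hivesterix.serde.parser.IHiveParser;
    import edu.uci.ics.hivesterix.serde.parser.TextToBinaryTupleParser;
    import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;

    public class RowParseDemo {
        // Keep source columns 0 and 2 of a three-column row in output slots 0 and 1.
        static void parseRow(ObjectInspector rowInspector, byte[] row) throws IOException {
            int[] outputColumnsOffset = new int[] { 0, -1, 1 };
            IHiveParser parser = new TextToBinaryTupleParser(outputColumnsOffset, rowInspector);
            ArrayTupleBuilder tb = new ArrayTupleBuilder(2); // two projected fields
            tb.reset();
            // Appends each projected field's bytes plus its end offset to tb.
            parser.parse(row, 0, row.length, tb);
        }
    }
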
diff --git a/hivesterix/hivesterix-translator/pom.xml b/hivesterix/hivesterix-translator/pom.xml
new file mode 100644
index 0000000..b99d652
--- /dev/null
+++ b/hivesterix/hivesterix-translator/pom.xml
@@ -0,0 +1,61 @@
+<?xml version="1.0"?>
+<project
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+	xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>hivesterix-translator</artifactId>
+	<name>hivesterix-translator</name>
+
+	<parent>
+		<artifactId>hivesterix</artifactId>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+					<encoding>UTF-8</encoding>
+					<fork>true</fork>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<dependencies>
+		<dependency>
+			<groupId>org.apache.hadoop.hive</groupId>
+			<artifactId>hive-exec</artifactId>
+			<version>0.7.0</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>algebricks-compiler</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hivesterix-common</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hivesterix-runtime</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveAlgebricksTranslator.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveAlgebricksTranslator.java
new file mode 100644
index 0000000..80b3fef
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveAlgebricksTranslator.java
@@ -0,0 +1,807 @@
+package edu.uci.ics.hivesterix.logical.plan;

+

+import java.io.OutputStreamWriter;

+import java.io.PrintWriter;

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.List;

+import java.util.Map;

+import java.util.Map.Entry;

+import java.util.Set;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.ColumnInfo;

+import org.apache.hadoop.hive.ql.exec.ExtractOperator;

+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;

+import org.apache.hadoop.hive.ql.exec.FilterOperator;

+import org.apache.hadoop.hive.ql.exec.GroupByOperator;

+import org.apache.hadoop.hive.ql.exec.JoinOperator;

+import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator;

+import org.apache.hadoop.hive.ql.exec.LimitOperator;

+import org.apache.hadoop.hive.ql.exec.MapJoinOperator;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;

+import org.apache.hadoop.hive.ql.exec.SelectOperator;

+import org.apache.hadoop.hive.ql.exec.TableScanOperator;

+import org.apache.hadoop.hive.ql.exec.UDF;

+import org.apache.hadoop.hive.ql.exec.UDTFOperator;

+import org.apache.hadoop.hive.ql.exec.UnionOperator;

+import org.apache.hadoop.hive.ql.plan.AggregationDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;

+import org.apache.hadoop.hive.ql.plan.PartitionDesc;

+import org.apache.hadoop.hive.ql.plan.UDTFDesc;

+import org.apache.hadoop.hive.ql.plan.api.OperatorType;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+

+import edu.uci.ics.hivesterix.logical.expression.ExpressionConstant;

+import edu.uci.ics.hivesterix.logical.expression.HiveAlgebricksBuiltInFunctionMap;

+import edu.uci.ics.hivesterix.logical.expression.HiveFunctionInfo;

+import edu.uci.ics.hivesterix.logical.expression.HivesterixConstantValue;

+import edu.uci.ics.hivesterix.logical.plan.visitor.ExtractVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.FilterVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.GroupByVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.JoinVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.LateralViewJoinVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.LimitVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.MapJoinVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.ProjectVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.SortVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.TableScanWriteVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.UnionVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Visitor;

+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;

+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.LogicalOperatorPrettyPrintVisitor;

+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.PlanPrettyPrinter;

+

+@SuppressWarnings("rawtypes")

+public class HiveAlgebricksTranslator implements Translator {

+

+    private int currentVariable = 0;

+

+    private List<Mutable<ILogicalOperator>> logicalOp = new ArrayList<Mutable<ILogicalOperator>>();

+

+    private boolean continueTraverse = true;

+

+    private IMetadataProvider<PartitionDesc, Object> metaData;

+

+    /**

+     * map variable name to the logical variable

+     */

+    private HashMap<String, LogicalVariable> nameToLogicalVariableMap = new HashMap<String, LogicalVariable>();

+

+    /**

+     * map field name to LogicalVariable

+     */

+    private HashMap<String, LogicalVariable> fieldToLogicalVariableMap = new HashMap<String, LogicalVariable>();

+

+    /**

+     * map logical variable to name

+     */

+    private HashMap<LogicalVariable, String> logicalVariableToFieldMap = new HashMap<LogicalVariable, String>();

+

+    /**

+     * asterix root operators

+     */

+    private List<Mutable<ILogicalOperator>> rootOperators = new ArrayList<Mutable<ILogicalOperator>>();

+

+    /**

+     * a list of visitors

+     */

+    private List<Visitor> visitors = new ArrayList<Visitor>();

+

+    /**

+     * output writer to print things out

+     */

+    private static PrintWriter outputWriter = new PrintWriter(new OutputStreamWriter(System.out));

+

+    /**

+     * map a logical variable to type info

+     */

+    private HashMap<LogicalVariable, TypeInfo> variableToType = new HashMap<LogicalVariable, TypeInfo>();

+

+    @Override

+    public LogicalVariable getVariable(String fieldName, TypeInfo type) {

+        LogicalVariable var = fieldToLogicalVariableMap.get(fieldName);

+        if (var == null) {

+            currentVariable++;

+            var = new LogicalVariable(currentVariable);

+            fieldToLogicalVariableMap.put(fieldName, var);

+            nameToLogicalVariableMap.put(var.toString(), var);

+            variableToType.put(var, type);

+            logicalVariableToFieldMap.put(var, fieldName);

+        }

+        return var;

+    }

+

+    @Override

+    public LogicalVariable getNewVariable(String fieldName, TypeInfo type) {

+        currentVariable++;

+        LogicalVariable var = new LogicalVariable(currentVariable);

+        fieldToLogicalVariableMap.put(fieldName, var);

+        nameToLogicalVariableMap.put(var.toString(), var);

+        variableToType.put(var, type);

+        logicalVariableToFieldMap.put(var, fieldName);

+        return var;

+    }

+

+    @Override

+    public void replaceVariable(LogicalVariable oldVar, LogicalVariable newVar) {

+        String name = this.logicalVariableToFieldMap.get(oldVar);

+        if (name != null) {

+            fieldToLogicalVariableMap.put(name, newVar);

+            nameToLogicalVariableMap.put(newVar.toString(), newVar);

+            nameToLogicalVariableMap.put(oldVar.toString(), newVar);

+            logicalVariableToFieldMap.put(newVar, name);

+        }

+    }

+

+    @Override

+    public IMetadataProvider<PartitionDesc, Object> getMetadataProvider() {

+        return metaData;

+    }

+

+    /**

+     * only get an variable, without rewriting it

+     * 

+     * @param fieldName

+     * @return

+     */

+    private LogicalVariable getVariableOnly(String fieldName) {

+        return fieldToLogicalVariableMap.get(fieldName);

+    }

+

+    private void updateVariable(String fieldName, LogicalVariable variable) {

+        LogicalVariable var = fieldToLogicalVariableMap.get(fieldName);

+        if (var == null) {

+            fieldToLogicalVariableMap.put(fieldName, variable);

+            nameToLogicalVariableMap.put(fieldName, variable);

+        } else if (!var.equals(variable)) {

+            fieldToLogicalVariableMap.put(fieldName, variable);

+            nameToLogicalVariableMap.put(fieldName, variable);

+        }

+    }

+

+    /**

+     * get a list of logical variables from the schema

+     * 

+     * @param schema

+     * @return

+     */

+    @Override

+    public List<LogicalVariable> getVariablesFromSchema(Schema schema) {

+        List<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+        List<String> names = schema.getNames();

+

+        for (String name : names)

+            variables.add(nameToLogicalVariableMap.get(name));

+        return variables;

+    }

+

+    /**

+     * get variable to typeinfo map

+     * 

+     * @return

+     */

+    public HashMap<LogicalVariable, TypeInfo> getVariableContext() {

+        return this.variableToType;

+    }

+

+    /**

+     * get the number of variables s

+     * 

+     * @return

+     */

+    public int getVariableCounter() {

+        return currentVariable + 1;

+    }

+

+    /**

+     * translate from hive operator tree to asterix operator tree

+     * 

+     * @param hive

+     *            roots

+     * @return Algebricks roots

+     */

+    public void translate(List<Operator> hiveRoot, ILogicalOperator parentOperator,

+            HashMap<String, PartitionDesc> aliasToPathMap) throws AlgebricksException {

+        /**

+         * register visitors

+         */

+        visitors.add(new FilterVisitor());

+        visitors.add(new GroupByVisitor());

+        visitors.add(new JoinVisitor());

+        visitors.add(new LateralViewJoinVisitor());

+        visitors.add(new UnionVisitor());

+        visitors.add(new LimitVisitor());

+        visitors.add(new MapJoinVisitor());

+        visitors.add(new ProjectVisitor());

+        visitors.add(new SortVisitor());

+        visitors.add(new ExtractVisitor());

+        visitors.add(new TableScanWriteVisitor(aliasToPathMap));

+

+        List<Mutable<ILogicalOperator>> refList = translate(hiveRoot, new MutableObject<ILogicalOperator>(

+                parentOperator));

+        insertReplicateOperator(refList);

+        if (refList != null)

+            rootOperators.addAll(refList);

+    }

+

+    /**

+     * translate operator DAG

+     * 

+     * @param hiveRoot

+     * @param AlgebricksParentOperator

+     * @return

+     */

+    private List<Mutable<ILogicalOperator>> translate(List<Operator> hiveRoot,

+            Mutable<ILogicalOperator> AlgebricksParentOperator) throws AlgebricksException {

+

+        for (Operator hiveOperator : hiveRoot) {

+            continueTraverse = true;

+            Mutable<ILogicalOperator> currentOperatorRef = null;

+            if (hiveOperator.getType() == OperatorType.FILTER) {

+                FilterOperator fop = (FilterOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null)

+                        break;

+                }

+            } else if (hiveOperator.getType() == OperatorType.REDUCESINK) {

+                ReduceSinkOperator fop = (ReduceSinkOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null)

+                        break;

+                }

+            } else if (hiveOperator.getType() == OperatorType.JOIN) {

+                JoinOperator fop = (JoinOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null) {

+                        continueTraverse = true;

+                        break;

+                    } else

+                        continueTraverse = false;

+                }

+                if (currentOperatorRef == null)

+                    return null;

+            } else if (hiveOperator.getType() == OperatorType.LATERALVIEWJOIN) {

+                LateralViewJoinOperator fop = (LateralViewJoinOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null)

+                        break;

+                }

+                if (currentOperatorRef == null)

+                    return null;

+            } else if (hiveOperator.getType() == OperatorType.MAPJOIN) {

+                MapJoinOperator fop = (MapJoinOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null) {

+                        continueTraverse = true;

+                        break;

+                    } else

+                        continueTraverse = false;

+                }

+                if (currentOperatorRef == null)

+                    return null;

+            } else if (hiveOperator.getType() == OperatorType.SELECT) {

+                SelectOperator fop = (SelectOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null)

+                        break;

+                }

+            } else if (hiveOperator.getType() == OperatorType.EXTRACT) {

+                ExtractOperator fop = (ExtractOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null)

+                        break;

+                }

+            } else if (hiveOperator.getType() == OperatorType.GROUPBY) {

+                GroupByOperator fop = (GroupByOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null)

+                        break;

+                }

+            } else if (hiveOperator.getType() == OperatorType.TABLESCAN) {

+                TableScanOperator fop = (TableScanOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null)

+                        break;

+                }

+            } else if (hiveOperator.getType() == OperatorType.FILESINK) {

+                FileSinkOperator fop = (FileSinkOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null)

+                        break;

+                }

+            } else if (hiveOperator.getType() == OperatorType.LIMIT) {

+                LimitOperator lop = (LimitOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(lop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null)

+                        break;

+                }

+            } else if (hiveOperator.getType() == OperatorType.UDTF) {

+                UDTFOperator lop = (UDTFOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(lop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null)

+                        break;

+                }

+            } else if (hiveOperator.getType() == OperatorType.UNION) {

+                UnionOperator lop = (UnionOperator) hiveOperator;

+                for (Visitor visitor : visitors) {

+                    currentOperatorRef = visitor.visit(lop, AlgebricksParentOperator, this);

+                    if (currentOperatorRef != null) {

+                        continueTraverse = true;

+                        break;

+                    } else

+                        continueTraverse = false;

+                }

+            } else

+                ;

+            if (hiveOperator.getChildOperators() != null && hiveOperator.getChildOperators().size() > 0

+                    && continueTraverse) {

+                @SuppressWarnings("unchecked")

+                List<Operator> children = hiveOperator.getChildOperators();

+                if (currentOperatorRef == null)

+                    currentOperatorRef = AlgebricksParentOperator;

+                translate(children, currentOperatorRef);

+            }

+            if (hiveOperator.getChildOperators() == null || hiveOperator.getChildOperators().size() == 0)

+                logicalOp.add(currentOperatorRef);

+        }

+        return logicalOp;

+    }

+

+    /**

+     * used in select, group by to get no-column-expression columns

+     * 

+     * @param cols

+     * @return

+     */

+    public ILogicalOperator getAssignOperator(Mutable<ILogicalOperator> parent, List<ExprNodeDesc> cols,

+            ArrayList<LogicalVariable> variables) {

+

+        ArrayList<Mutable<ILogicalExpression>> expressions = new ArrayList<Mutable<ILogicalExpression>>();

+

+        /**

+         * variables to be appended in the assign operator

+         */

+        ArrayList<LogicalVariable> appendedVariables = new ArrayList<LogicalVariable>();

+

+        // each variable may be assigned only once

+        for (ExprNodeDesc hiveExpr : cols) {

+            rewriteExpression(hiveExpr);

+
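+            // a bare column reference resolves to a named variable; any computed expression is assigned to a fresh variable below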

+            if (hiveExpr instanceof ExprNodeColumnDesc) {

+                ExprNodeColumnDesc desc2 = (ExprNodeColumnDesc) hiveExpr;

+                String fieldName = desc2.getTabAlias() + "." + desc2.getColumn();

+


+                if (fieldName.indexOf("$$") < 0) {

+                    LogicalVariable var = getVariable(fieldName, hiveExpr.getTypeInfo());

+                    desc2.setColumn(var.toString());

+                    desc2.setTabAlias("");

+                    variables.add(var);

+                } else {

+                    LogicalVariable var = nameToLogicalVariableMap.get(desc2.getColumn());

+                    String name = this.logicalVariableToFieldMap.get(var);

+                    var = this.getVariableOnly(name);

+                    variables.add(var);

+                }

+            } else {

+                Mutable<ILogicalExpression> asterixExpr = translateScalarFucntion(hiveExpr);

+                expressions.add(asterixExpr);

+                LogicalVariable var = getVariable(hiveExpr.getExprString() + asterixExpr.hashCode(),

+                        hiveExpr.getTypeInfo());

+                variables.add(var);

+                appendedVariables.add(var);

+            }

+        }

+

+        /**

+         * create an assign operator to deal with appending

+         */

+        ILogicalOperator assignOp = null;

+        if (appendedVariables.size() > 0) {

+            assignOp = new AssignOperator(appendedVariables, expressions);

+            assignOp.getInputs().add(parent);

+        }

+        return assignOp;

+    }

+

+    private ILogicalPlan plan;

+

+    public ILogicalPlan genLogicalPlan() {

+        plan = new ALogicalPlanImpl(rootOperators);

+        return plan;

+    }

+

+    public void printOperators() throws AlgebricksException {

+        LogicalOperatorPrettyPrintVisitor pvisitor = new LogicalOperatorPrettyPrintVisitor();

+        StringBuilder buffer = new StringBuilder();

+        PlanPrettyPrinter.printPlan(plan, buffer, pvisitor, 0);

+        outputWriter.println(buffer);

+        outputWriter.println("rewritten variables: ");

+        outputWriter.flush();

+        printVariables();

+

+    }

+

+    public static void setOutputPrinter(PrintWriter writer) {

+        outputWriter = writer;

+    }

+

+    private void printVariables() {

+        Set<Entry<String, LogicalVariable>> entries = fieldToLogicalVariableMap.entrySet();

+

+        for (Entry<String, LogicalVariable> entry : entries) {

+            outputWriter.println(entry.getKey() + " -> " + entry.getValue());

+        }

+        outputWriter.flush();

+    }

+

+    /**
+     * generate the schema (field names and types) for the output of an
+     * operator; callers typically pass the parent operator to obtain the
+     * schema of the current operator's input
+     * 
+     * @param operator
+     *            The Hive operator
+     * @return a Schema object built from the operator's output signature
+     */

+    public Schema generateInputSchema(Operator operator) {

+        List<String> variableNames = new ArrayList<String>();

+        List<TypeInfo> typeList = new ArrayList<TypeInfo>();

+        List<ColumnInfo> columns = operator.getSchema().getSignature();

+

+        for (ColumnInfo col : columns) {

+            TypeInfo type = col.getType();

+            typeList.add(type);

+

+            String fieldName = col.getInternalName();

+            variableNames.add(fieldName);

+        }

+

+        return new Schema(variableNames, typeList);

+    }

+

+    /**

+     * rewrite the names of output columns so that downstream expression
+     * evaluators can use them

+     * 

+     * @param operator

+     */

+    public void rewriteOperatorOutputSchema(Operator operator) {

+        List<ColumnInfo> columns = operator.getSchema().getSignature();

+

+        for (ColumnInfo column : columns) {

+            String columnName = column.getTabAlias() + "." + column.getInternalName();

+            if (columnName.indexOf("$$") < 0) {

+                LogicalVariable var = getVariable(columnName, column.getType());

+                column.setInternalName(var.toString());

+            }

+        }

+    }

+

+    @Override

+    public void rewriteOperatorOutputSchema(List<LogicalVariable> variables, Operator operator) {

+

+        List<ColumnInfo> columns = operator.getSchema().getSignature();

+        if (variables.size() != columns.size()) {

+            throw new IllegalStateException("output cardinality error " + operator.getName() + " variable size: "

+                    + variables.size() + " expected " + columns.size());

+        }

+

+        for (int i = 0; i < variables.size(); i++) {

+            LogicalVariable var = variables.get(i);

+            ColumnInfo column = columns.get(i);

+            String fieldName = column.getTabAlias() + "." + column.getInternalName();

+            if (fieldName.indexOf("$$") < 0) {

+                updateVariable(fieldName, var);

+                column.setInternalName(var.toString());

+            }

+        }

+    }

+

+    /**

+     * rewrite an expression and substitute variables

+     * 

+     * @param expr

+     *            hive expression

+     */

+    public void rewriteExpression(ExprNodeDesc expr) {

+        if (expr instanceof ExprNodeColumnDesc) {

+            ExprNodeColumnDesc desc = (ExprNodeColumnDesc) expr;

+            String fieldName = desc.getTabAlias() + "." + desc.getColumn();

+            if (fieldName.indexOf("$$") < 0) {

+                LogicalVariable var = getVariableOnly(fieldName);

+                if (var == null) {

+                    fieldName = "." + desc.getColumn();

+                    var = getVariableOnly(fieldName);

+                    if (var == null) {

+                        fieldName = "null." + desc.getColumn();

+                        var = getVariableOnly(fieldName);

+                        if (var == null) {

+                            throw new IllegalStateException("cannot resolve column " + desc.getColumn() + " to a logical variable");

+                        }

+                    }

+                }

+                String name = this.logicalVariableToFieldMap.get(var);

+                var = getVariableOnly(name);

+                desc.setColumn(var.toString());

+            }

+        } else {

+            if (expr.getChildren() != null && expr.getChildren().size() > 0) {

+                List<ExprNodeDesc> children = expr.getChildren();

+                for (ExprNodeDesc desc : children)

+                    rewriteExpression(desc);

+            }

+        }

+    }

+

+    /**

+     * rewrite an expression and substitute variables

+     * 

+     * @param expr

+     *            hive expression

+     */

+    public void rewriteExpressionPartial(ExprNodeDesc expr) {

+        if (expr instanceof ExprNodeColumnDesc) {

+            ExprNodeColumnDesc desc = (ExprNodeColumnDesc) expr;

+            String fieldName = desc.getTabAlias() + "." + desc.getColumn();

+            if (fieldName.indexOf("$$") < 0) {

+                LogicalVariable var = getVariableOnly(fieldName);

+                desc.setColumn(var.toString());

+            }

+        } else {

+            if (expr.getChildren() != null && expr.getChildren().size() > 0) {

+                List<ExprNodeDesc> children = expr.getChildren();

+                for (ExprNodeDesc desc : children)

+                    rewriteExpressionPartial(desc);

+            }

+        }

+    }

+


+    /**
+     * translate scalar function expression
+     * 
+     * @param hiveExpr
+     *            the hive scalar expression
+     * @return the translated Algebricks expression
+     */

+    public Mutable<ILogicalExpression> translateScalarFucntion(ExprNodeDesc hiveExpr) {

+        ILogicalExpression AlgebricksExpr;

+

+        if (hiveExpr instanceof ExprNodeGenericFuncDesc) {

+            List<Mutable<ILogicalExpression>> arguments = new ArrayList<Mutable<ILogicalExpression>>();

+            List<ExprNodeDesc> children = hiveExpr.getChildren();

+

+            for (ExprNodeDesc child : children)

+                arguments.add(translateScalarFucntion(child));

+

+            ExprNodeGenericFuncDesc funcExpr = (ExprNodeGenericFuncDesc) hiveExpr;

+            GenericUDF genericUdf = funcExpr.getGenericUDF();

+            UDF udf = null;

+            if (genericUdf instanceof GenericUDFBridge) {
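+                // a GenericUDFBridge wraps a plain UDF class; instantiate it so the mapping below can key on the concrete class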

+                GenericUDFBridge bridge = (GenericUDFBridge) genericUdf;

+                try {

+                    udf = bridge.getUdfClass().newInstance();

+                } catch (Exception e) {

+                    e.printStackTrace();

+                }

+            }

+

+            /**

+             * set up the hive function

+             */

+            Object hiveFunction = genericUdf;

+            if (udf != null)

+                hiveFunction = udf;

+

+            FunctionIdentifier funcId = HiveAlgebricksBuiltInFunctionMap.INSTANCE.getAlgebricksFunctionId(hiveFunction

+                    .getClass());
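+            // no built-in Algebricks mapping: fall back to an identifier derived from the class name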

+            if (funcId == null) {

+                funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, hiveFunction.getClass().getName());

+            }

+

+            Object functionInfo = null;

+            if (genericUdf instanceof GenericUDFBridge) {

+                functionInfo = funcExpr;

+            }

+

+            /**

+             * generate the function call expression

+             */

+            ScalarFunctionCallExpression AlgebricksFuncExpr = new ScalarFunctionCallExpression(new HiveFunctionInfo(

+                    funcId, functionInfo), arguments);

+            AlgebricksExpr = AlgebricksFuncExpr;

+

+        } else if (hiveExpr instanceof ExprNodeColumnDesc) {

+            ExprNodeColumnDesc column = (ExprNodeColumnDesc) hiveExpr;

+            LogicalVariable var = this.getVariable(column.getColumn());

+            AlgebricksExpr = new VariableReferenceExpression(var);

+

+        } else if (hiveExpr instanceof ExprNodeFieldDesc) {

+            FunctionIdentifier funcId;

+            funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, ExpressionConstant.FIELDACCESS);

+

+            ScalarFunctionCallExpression AlgebricksFuncExpr = new ScalarFunctionCallExpression(new HiveFunctionInfo(

+                    funcId, hiveExpr));

+            AlgebricksExpr = AlgebricksFuncExpr;

+        } else if (hiveExpr instanceof ExprNodeConstantDesc) {

+            ExprNodeConstantDesc hiveConst = (ExprNodeConstantDesc) hiveExpr;

+            Object value = hiveConst.getValue();

+            AlgebricksExpr = new ConstantExpression(new HivesterixConstantValue(value));

+        } else if (hiveExpr instanceof ExprNodeNullDesc) {

+            FunctionIdentifier funcId;

+            funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, ExpressionConstant.NULL);

+

+            ScalarFunctionCallExpression AlgebricksFuncExpr = new ScalarFunctionCallExpression(new HiveFunctionInfo(

+                    funcId, hiveExpr));

+

+            AlgebricksExpr = AlgebricksFuncExpr;

+        } else {

+            throw new IllegalStateException("unknown hive expression");

+        }

+        return new MutableObject<ILogicalExpression>(AlgebricksExpr);

+    }

+

+    /**
+     * translate aggregation function expression
+     * 
+     * @param aggregateDesc
+     *            the hive aggregation descriptor
+     * @return the translated aggregate function call expression
+     */

+    public Mutable<ILogicalExpression> translateAggregation(AggregationDesc aggregateDesc) {

+

+        String UDAFName = aggregateDesc.getGenericUDAFName();

+

+        List<Mutable<ILogicalExpression>> arguments = new ArrayList<Mutable<ILogicalExpression>>();

+        List<ExprNodeDesc> children = aggregateDesc.getParameters();

+

+        for (ExprNodeDesc child : children)

+            arguments.add(translateScalarFucntion(child));

+
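+        // encode the UDAF mode into the function name so that partial and final aggregation steps resolve to distinct functions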

+        FunctionIdentifier funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, UDAFName + "("

+                + aggregateDesc.getMode() + ")");

+        HiveFunctionInfo funcInfo = new HiveFunctionInfo(funcId, aggregateDesc);

+        AggregateFunctionCallExpression aggregationExpression = new AggregateFunctionCallExpression(funcInfo, false,

+                arguments);

+        return new MutableObject<ILogicalExpression>(aggregationExpression);

+    }

+

+    /**
+     * translate a UDTF (unnest) function expression
+     * 
+     * @param udtfDesc
+     *            the hive UDTF descriptor
+     * @param argument
+     *            the translated argument expression
+     * @return an unnesting function call expression
+     */

+    public Mutable<ILogicalExpression> translateUnnestFunction(UDTFDesc udtfDesc, Mutable<ILogicalExpression> argument) {

+

+        String UDTFName = udtfDesc.getUDTFName();

+

+        FunctionIdentifier funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, UDTFName);

+        UnnestingFunctionCallExpression unnestingExpression = new UnnestingFunctionCallExpression(new HiveFunctionInfo(

+                funcId, udtfDesc));

+        unnestingExpression.getArguments().add(argument);

+        return new MutableObject<ILogicalExpression>(unnestingExpression);

+    }

+

+    /**

+     * get typeinfo

+     */

+    @Override

+    public TypeInfo getType(LogicalVariable var) {

+        return variableToType.get(var);

+    }

+

+    /**

+     * get variable from variable name

+     */

+    @Override

+    public LogicalVariable getVariable(String name) {

+        return nameToLogicalVariableMap.get(name);

+    }

+

+    @Override

+    public LogicalVariable getVariableFromFieldName(String fieldName) {

+        return this.getVariableOnly(fieldName);

+    }

+

+    /**

+     * set the metadata provider

+     */

+    @Override

+    public void setMetadataProvider(IMetadataProvider<PartitionDesc, Object> metadata) {

+        this.metaData = metadata;

+    }

+

+    /**

+     * insert ReplicateOperator when necessary

+     */

+    private void insertReplicateOperator(List<Mutable<ILogicalOperator>> roots) {

+        Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> childToParentsMap = new HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>>();

+        buildChildToParentsMapping(roots, childToParentsMap);

+        for (Entry<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> entry : childToParentsMap.entrySet()) {

+            List<Mutable<ILogicalOperator>> pList = entry.getValue();

+            if (pList.size() > 1) {
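+                // the subtree is shared by several parents: interpose a replicate operator and rewire each parent input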

+                ILogicalOperator rop = new ReplicateOperator(pList.size());

+                Mutable<ILogicalOperator> ropRef = new MutableObject<ILogicalOperator>(rop);

+                Mutable<ILogicalOperator> childRef = entry.getKey();

+                rop.getInputs().add(childRef);

+                for (Mutable<ILogicalOperator> parentRef : pList) {

+                    ILogicalOperator parentOp = parentRef.getValue();

+                    int index = parentOp.getInputs().indexOf(childRef);

+                    parentOp.getInputs().set(index, ropRef);

+                }

+            }

+        }

+    }

+

+    /**

+     * build the mapping from child to Parents

+     * 

+     * @param roots

+     * @param childToParentsMap

+     */

+    private void buildChildToParentsMapping(List<Mutable<ILogicalOperator>> roots,

+            Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> map) {

+        for (Mutable<ILogicalOperator> opRef : roots) {

+            List<Mutable<ILogicalOperator>> childRefs = opRef.getValue().getInputs();

+            for (Mutable<ILogicalOperator> childRef : childRefs) {

+                List<Mutable<ILogicalOperator>> parentList = map.get(childRef);

+                if (parentList == null) {

+                    parentList = new ArrayList<Mutable<ILogicalOperator>>();

+                    map.put(childRef, parentList);

+                }

+                if (!parentList.contains(opRef))

+                    parentList.add(opRef);

+            }

+            buildChildToParentsMapping(childRefs, map);

+        }

+    }

+}

diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveLogicalPlanAndMetaData.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveLogicalPlanAndMetaData.java
new file mode 100644
index 0000000..d5801a3
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveLogicalPlanAndMetaData.java
@@ -0,0 +1,35 @@
+package edu.uci.ics.hivesterix.logical.plan;

+

+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlanAndMetadata;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;

+

+@SuppressWarnings({ "rawtypes", "unchecked" })

+public class HiveLogicalPlanAndMetaData implements ILogicalPlanAndMetadata {

+

+    IMetadataProvider metadata;

+    ILogicalPlan plan;

+

+    public HiveLogicalPlanAndMetaData(ILogicalPlan plan, IMetadataProvider metadata) {

+        this.plan = plan;

+        this.metadata = metadata;

+    }

+

+    @Override

+    public IMetadataProvider getMetadataProvider() {

+        return metadata;

+    }

+

+    @Override

+    public ILogicalPlan getPlan() {

+        return plan;

+    }

+

+    @Override

+    public AlgebricksPartitionConstraint getClusterLocations() {

+        // not implemented: no cluster location constraint is provided

+        return null;

+    }

+

+}

diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveOperatorAnnotations.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveOperatorAnnotations.java
new file mode 100644
index 0000000..0ea4e01
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveOperatorAnnotations.java
@@ -0,0 +1,8 @@
+package edu.uci.ics.hivesterix.logical.plan;
+
+public class HiveOperatorAnnotations {
+
+    // hints
+    public static final String LOCAL_GROUP_BY = "LOCAL_GROUP_BY";
+
+}
diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ExtractVisitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ExtractVisitor.java
new file mode 100644
index 0000000..1c67bae
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ExtractVisitor.java
@@ -0,0 +1,26 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.hadoop.hive.ql.exec.ExtractOperator;

+

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+

+public class ExtractVisitor extends DefaultVisitor {

+

+    @Override

+    public Mutable<ILogicalOperator> visit(ExtractOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {
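+        // extract only forwards rows: propagate the parent's schema and keep the parent as the current operator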

+        Schema currentSchema = t.generateInputSchema(operator.getParentOperators().get(0));

+        operator.setSchema(operator.getParentOperators().get(0).getSchema());

+        List<LogicalVariable> latestOutputSchema = t.getVariablesFromSchema(currentSchema);

+        t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

+        return null;

+    }

+

+}

diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/FilterVisitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/FilterVisitor.java
new file mode 100644
index 0000000..9279144
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/FilterVisitor.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.FilterOperator;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.FilterDesc;

+

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;

+

+public class FilterVisitor extends DefaultVisitor {

+

+    @Override

+    public Mutable<ILogicalOperator> visit(FilterOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+        Schema currentSchema = t.generateInputSchema(operator.getParentOperators().get(0));

+

+        FilterDesc desc = (FilterDesc) operator.getConf();

+        ExprNodeDesc predicate = desc.getPredicate();

+        t.rewriteExpression(predicate);

+
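+        // a hive filter becomes an Algebricks select over the translated predicate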

+        Mutable<ILogicalExpression> exprs = t.translateScalarFucntion(desc.getPredicate());

+        ILogicalOperator currentOperator = new SelectOperator(exprs);

+        currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+

+        // populate the schema from upstream operator

+        operator.setSchema(operator.getParentOperators().get(0).getSchema());

+        List<LogicalVariable> latestOutputSchema = t.getVariablesFromSchema(currentSchema);

+        t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

+

+}

diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/GroupByVisitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/GroupByVisitor.java
new file mode 100644
index 0000000..b7d5779
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/GroupByVisitor.java
@@ -0,0 +1,264 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.lang.reflect.Field;

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.conf.HiveConf;

+import org.apache.hadoop.hive.ql.exec.ColumnInfo;

+import org.apache.hadoop.hive.ql.exec.GroupByOperator;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;

+import org.apache.hadoop.hive.ql.plan.AggregationDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.GroupByDesc;

+import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;

+import org.apache.hadoop.hive.ql.plan.api.OperatorType;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

+

+import edu.uci.ics.hivesterix.common.config.ConfUtil;

+import edu.uci.ics.hivesterix.logical.plan.HiveOperatorAnnotations;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.OperatorAnnotations;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;

+

+@SuppressWarnings({ "rawtypes", "unchecked" })

+public class GroupByVisitor extends DefaultVisitor {

+

+    private List<Mutable<ILogicalExpression>> AlgebricksAggs = new ArrayList<Mutable<ILogicalExpression>>();

+    private List<IFunctionInfo> localAggs = new ArrayList<IFunctionInfo>();

+    private boolean isDistinct = false;

+    private boolean gbyKeyNotRedKey = false;

+

+    @Override

+    public Mutable<ILogicalOperator> visit(GroupByOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException {

+

+        // get descriptors

+        GroupByDesc desc = (GroupByDesc) operator.getConf();

+        GroupByDesc.Mode mode = desc.getMode();

+

+        List<ExprNodeDesc> keys = desc.getKeys();

+        List<AggregationDesc> aggregators = desc.getAggregators();

+

+        Operator child = operator.getChildOperators().get(0);

+

+        if (child.getType() == OperatorType.REDUCESINK) {

+            List<ExprNodeDesc> partKeys = ((ReduceSinkDesc) child.getConf()).getPartitionCols();

+            if (keys.size() != partKeys.size())

+                gbyKeyNotRedKey = true;

+        }

+
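+        // only the first (local/partial) group-by phase is translated into a full operator; the reduce-side phase below merely patches the two-step aggregators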

+        if (mode == GroupByDesc.Mode.PARTIAL1 || mode == GroupByDesc.Mode.HASH || mode == GroupByDesc.Mode.COMPLETE

+                || (aggregators.size() == 0 && isDistinct == false) || gbyKeyNotRedKey) {

+            AlgebricksAggs.clear();

+            // add an assign operator if the key is not a column expression

+            ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

+            ILogicalOperator currentOperator = null;

+            ILogicalOperator assignOperator = t.getAssignOperator(AlgebricksParentOperatorRef, keys, keyVariables);

+            if (assignOperator != null) {

+                currentOperator = assignOperator;

+                AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

+            }

+

+            // get key variable expression list

+            List<Mutable<ILogicalExpression>> keyExprs = new ArrayList<Mutable<ILogicalExpression>>();

+            for (LogicalVariable var : keyVariables) {

+                keyExprs.add(t.translateScalarFucntion(new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, var

+                        .toString(), "", false)));

+            }

+

+            if (aggregators.size() == 0) {
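+                // a group-by without aggregators is a DISTINCT: emit a distinct operator over the key variables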

+                List<Mutable<ILogicalExpression>> distinctExprs = new ArrayList<Mutable<ILogicalExpression>>();

+                for (LogicalVariable var : keyVariables) {

+                    Mutable<ILogicalExpression> varExpr = new MutableObject<ILogicalExpression>(

+                            new VariableReferenceExpression(var));

+                    distinctExprs.add(varExpr);

+                }

+                t.rewriteOperatorOutputSchema(keyVariables, operator);

+                isDistinct = true;

+                ILogicalOperator lop = new DistinctOperator(distinctExprs);

+                lop.getInputs().add(AlgebricksParentOperatorRef);

+                return new MutableObject<ILogicalOperator>(lop);

+            }

+

+            // get the pair<LogicalVariable, ILogicalExpression> list

+            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> keyParameters = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();

+            keyVariables.clear();

+            for (Mutable<ILogicalExpression> expr : keyExprs) {

+                LogicalVariable keyVar = t.getVariable(expr.getValue().toString(), TypeInfoFactory.unknownTypeInfo);

+                keyParameters.add(new Pair(keyVar, expr));

+                keyVariables.add(keyVar);

+            }

+

+            // get the parameters for the aggregator operator

+            ArrayList<LogicalVariable> aggVariables = new ArrayList<LogicalVariable>();

+            ArrayList<Mutable<ILogicalExpression>> aggExprs = new ArrayList<Mutable<ILogicalExpression>>();

+

+            // get the type of each aggregation function

+            HashMap<AggregationDesc, TypeInfo> aggToType = new HashMap<AggregationDesc, TypeInfo>();

+            List<ColumnInfo> columns = operator.getSchema().getSignature();

+            int offset = keys.size();

+            for (int i = offset; i < columns.size(); i++) {

+                aggToType.put(aggregators.get(i - offset), columns.get(i).getType());

+            }

+

+            localAggs.clear();

+            // rewrite parameter expressions for all aggregators

+            for (AggregationDesc aggregator : aggregators) {

+                for (ExprNodeDesc parameter : aggregator.getParameters()) {

+                    t.rewriteExpression(parameter);

+                }

+                Mutable<ILogicalExpression> aggExpr = t.translateAggregation(aggregator);

+                AbstractFunctionCallExpression localAggExpr = (AbstractFunctionCallExpression) aggExpr.getValue();

+                localAggs.add(localAggExpr.getFunctionInfo());

+

+                AggregationDesc logicalAgg = new AggregationDesc(aggregator.getGenericUDAFName(),

+                        aggregator.getGenericUDAFEvaluator(), aggregator.getParameters(), aggregator.getDistinct(),

+                        Mode.COMPLETE);

+                Mutable<ILogicalExpression> logicalAggExpr = t.translateAggregation(logicalAgg);

+

+                AlgebricksAggs.add(logicalAggExpr);

+                if (!gbyKeyNotRedKey)

+                    aggExprs.add(logicalAggExpr);

+                else

+                    aggExprs.add(aggExpr);

+

+                aggVariables.add(t.getVariable(aggregator.getExprString() + aggregator.getMode(),

+                        aggToType.get(aggregator)));

+            }

+

+            if (child.getType() != OperatorType.REDUCESINK)

+                gbyKeyNotRedKey = false;

+

+            // get the sub plan list
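+            // the aggregate operator lives in a nested sub-plan rooted at a nested-tuple-source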

+            AggregateOperator aggOperator = new AggregateOperator(aggVariables, aggExprs);

+            NestedTupleSourceOperator nestedTupleSource = new NestedTupleSourceOperator(

+                    new MutableObject<ILogicalOperator>());

+            aggOperator.getInputs().add(new MutableObject<ILogicalOperator>(nestedTupleSource));

+

+            List<Mutable<ILogicalOperator>> subRoots = new ArrayList<Mutable<ILogicalOperator>>();

+            subRoots.add(new MutableObject<ILogicalOperator>(aggOperator));

+            ILogicalPlan subPlan = new ALogicalPlanImpl(subRoots);

+            List<ILogicalPlan> subPlans = new ArrayList<ILogicalPlan>();

+            subPlans.add(subPlan);

+

+            // create the group by operator

+            currentOperator = new edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator(

+                    keyParameters, new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>(), subPlans);

+            currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+            nestedTupleSource.getDataSourceReference().setValue(currentOperator);

+

+            List<LogicalVariable> outputVariables = new ArrayList<LogicalVariable>();

+            outputVariables.addAll(keyVariables);

+            outputVariables.addAll(aggVariables);

+            t.rewriteOperatorOutputSchema(outputVariables, operator);

+

+            if (gbyKeyNotRedKey) {

+                currentOperator.getAnnotations().put(HiveOperatorAnnotations.LOCAL_GROUP_BY, Boolean.TRUE);

+            }

+

+            HiveConf conf = ConfUtil.getHiveConf();

+            Boolean extGby = conf.getBoolean("hive.algebricks.groupby.external", false);

+

+            if (extGby && isSerializable(aggregators)) {

+                currentOperator.getAnnotations().put(OperatorAnnotations.USE_EXTERNAL_GROUP_BY, Boolean.TRUE);

+            }

+            return new MutableObject<ILogicalOperator>(currentOperator);

+        } else {

+            isDistinct = false;

+            // rewrite parameter expressions for all aggregators

+            int i = 0;

+            for (AggregationDesc aggregator : aggregators) {

+                for (ExprNodeDesc parameter : aggregator.getParameters()) {

+                    t.rewriteExpression(parameter);

+                }

+                Mutable<ILogicalExpression> agg = t.translateAggregation(aggregator);

+                AggregateFunctionCallExpression originalAgg = (AggregateFunctionCallExpression) AlgebricksAggs.get(i)

+                        .getValue();

+                originalAgg.setStepOneAggregate(localAggs.get(i));

+                AggregateFunctionCallExpression currentAgg = (AggregateFunctionCallExpression) agg.getValue();

+                if (currentAgg.getFunctionInfo() != null) {

+                    originalAgg.setTwoStep(true);

+                    originalAgg.setStepTwoAggregate(currentAgg.getFunctionInfo());

+                }

+                i++;

+            }

+            return null;

+        }

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(ReduceSinkOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+        Operator downStream = (Operator) operator.getChildOperators().get(0);
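+        // only reduce sinks feeding a group-by are handled here; those feeding a join are handled by JoinVisitor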

+        if (!(downStream instanceof GroupByOperator)) {

+            return null;

+        }

+

+        ReduceSinkDesc desc = (ReduceSinkDesc) operator.getConf();

+        List<ExprNodeDesc> keys = desc.getKeyCols();

+        List<ExprNodeDesc> values = desc.getValueCols();

+

+        // insert assign for keys

+        ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

+        t.getAssignOperator(AlgebricksParentOperatorRef, keys, keyVariables);

+

+        // insert assign for values

+        ArrayList<LogicalVariable> valueVariables = new ArrayList<LogicalVariable>();

+        t.getAssignOperator(AlgebricksParentOperatorRef, values, valueVariables);

+

+        ArrayList<LogicalVariable> columns = new ArrayList<LogicalVariable>();

+        columns.addAll(keyVariables);

+        columns.addAll(valueVariables);

+

+        t.rewriteOperatorOutputSchema(columns, operator);

+        return null;

+    }

+

+    private boolean isSerializable(List<AggregationDesc> descs) throws AlgebricksException {
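+        // the external (spilling) group-by can only be used when every aggregation buffer field is a primitive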

+        try {

+            for (AggregationDesc desc : descs) {

+                GenericUDAFEvaluator udaf = desc.getGenericUDAFEvaluator();

+                AggregationBuffer buf = udaf.getNewAggregationBuffer();

+                Class<?> bufferClass = buf.getClass();

+                Field[] fields = bufferClass.getDeclaredFields();

+                for (Field field : fields) {

+                    field.setAccessible(true);

+                    String type = field.getType().toString();

+                    if (!(type.equals("int") || type.equals("long") || type.equals("float") || type.equals("double") || type

+                            .equals("boolean"))) {

+                        return false;

+                    }

+                }

+

+            }

+            return true;

+        } catch (Exception e) {

+            throw new AlgebricksException(e);

+        }

+    }

+

+}

diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/JoinVisitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/JoinVisitor.java
new file mode 100644
index 0000000..ef346bc
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/JoinVisitor.java
@@ -0,0 +1,417 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.Iterator;

+import java.util.List;

+import java.util.Map;

+import java.util.Map.Entry;

+import java.util.Set;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.JoinOperator;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;

+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;

+import org.apache.hadoop.hive.ql.plan.JoinCondDesc;

+import org.apache.hadoop.hive.ql.plan.JoinDesc;

+import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

+

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;

+

+@SuppressWarnings("rawtypes")

+public class JoinVisitor extends DefaultVisitor {

+

+    /**

+     * reduce sink operator to key variables

+     */

+    private HashMap<Operator, List<LogicalVariable>> reduceSinkToKeyVariables = new HashMap<Operator, List<LogicalVariable>>();

+

+    /**

+     * reduce sink operator to output field names

+     */

+    private HashMap<Operator, List<String>> reduceSinkToFieldNames = new HashMap<Operator, List<String>>();

+

+    /**

+     * reduce sink operator to output field types

+     */

+    private HashMap<Operator, List<TypeInfo>> reduceSinkToTypes = new HashMap<Operator, List<TypeInfo>>();

+

+    /**

+     * map a join operator (in hive) to its parent operators (in hive)

+     */

+    private HashMap<Operator, List<Operator>> operatorToHiveParents = new HashMap<Operator, List<Operator>>();

+

+    /**

+     * map a join operator (in hive) to its parent operators (in asterix)

+     */

+    private HashMap<Operator, List<ILogicalOperator>> operatorToAsterixParents = new HashMap<Operator, List<ILogicalOperator>>();

+

+    /**

+     * the latest traversed reduce sink operator

+     */

+    private Operator latestReduceSink = null;

+

+    /**

+     * the latest generated parent for join

+     */

+    private ILogicalOperator latestAlgebricksOperator = null;

+

+    /**

+     * process a join operator

+     */

+    @Override

+    public Mutable<ILogicalOperator> visit(JoinOperator operator, Mutable<ILogicalOperator> AlgebricksParentOperator,

+            Translator t) {

+        latestAlgebricksOperator = AlgebricksParentOperator.getValue();

+        translateJoinOperatorPreprocess(operator, t);

+        List<Operator> parents = operatorToHiveParents.get(operator);

+        if (parents.size() < operator.getParentOperators().size()) {
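+            // not every input branch has been visited yet: defer building the join until the last parent arrives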

+            return null;

+        } else {

+            ILogicalOperator joinOp = translateJoinOperator(operator, AlgebricksParentOperator, t);

+            return new MutableObject<ILogicalOperator>(joinOp);

+        }

+    }

+

+    private void reorder(Byte[] order, List<ILogicalOperator> parents, List<Operator> hiveParents) {
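+        // align both parent lists with hive's tag order so that join input i corresponds to tag i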

+        ILogicalOperator[] lops = new ILogicalOperator[parents.size()];

+        Operator[] ops = new Operator[hiveParents.size()];

+

+        for (Operator op : hiveParents) {

+            ReduceSinkOperator rop = (ReduceSinkOperator) op;

+            ReduceSinkDesc rdesc = rop.getConf();

+            int tag = rdesc.getTag();

+

+            int index = -1;

+            for (int i = 0; i < order.length; i++)

+                if (order[i] == tag) {

+                    index = i;

+                    break;

+                }

+            lops[index] = parents.get(hiveParents.indexOf(op));

+            ops[index] = op;

+        }

+

+        parents.clear();

+        hiveParents.clear();

+

+        for (int i = 0; i < lops.length; i++) {

+            parents.add(lops[i]);

+            hiveParents.add(ops[i]);

+        }

+    }

+

+    /**

+     * translate a hive join operator to asterix join operator->assign

+     * operator->project operator

+     * 

+     * @param parentOperator

+     * @param operator

+     * @return

+     */

+    private ILogicalOperator translateJoinOperator(Operator operator, Mutable<ILogicalOperator> parentOperator,

+            Translator t) {

+

+        JoinDesc joinDesc = (JoinDesc) operator.getConf();

+

+        // get the projection expression (already re-written) from each source

+        // table

+        Map<Byte, List<ExprNodeDesc>> exprMap = joinDesc.getExprs();

+        reorder(joinDesc.getTagOrder(), operatorToAsterixParents.get(operator), operatorToHiveParents.get(operator));

+

+        // build the join operator tree from the join conditions

+        ILogicalOperator currentOperator = generateJoinTree(joinDesc.getCondsList(),

+                operatorToAsterixParents.get(operator), operatorToHiveParents.get(operator), 0, t);

+        parentOperator = new MutableObject<ILogicalOperator>(currentOperator);

+

+        // add assign and project operator on top of a join

+        // output variables

+        ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+        Set<Entry<Byte, List<ExprNodeDesc>>> entries = exprMap.entrySet();

+        Iterator<Entry<Byte, List<ExprNodeDesc>>> iterator = entries.iterator();

+        while (iterator.hasNext()) {

+            List<ExprNodeDesc> outputExprs = iterator.next().getValue();

+            ILogicalOperator assignOperator = t.getAssignOperator(parentOperator, outputExprs, variables);

+

+            if (assignOperator != null) {

+                currentOperator = assignOperator;

+                parentOperator = new MutableObject<ILogicalOperator>(currentOperator);

+            }

+        }

+

+        ILogicalOperator po = new ProjectOperator(variables);

+        po.getInputs().add(parentOperator);

+        t.rewriteOperatorOutputSchema(variables, operator);

+        return po;

+    }

+

+    /**

+     * deal with reduce sink operator for the case of join

+     */

+    @Override

+    public Mutable<ILogicalOperator> visit(ReduceSinkOperator operator, Mutable<ILogicalOperator> parentOperator,

+            Translator t) {

+

+        Operator downStream = (Operator) operator.getChildOperators().get(0);

+        if (!(downStream instanceof JoinOperator))

+            return null;

+

+        ReduceSinkDesc desc = (ReduceSinkDesc) operator.getConf();

+        List<ExprNodeDesc> keys = desc.getKeyCols();

+        List<ExprNodeDesc> values = desc.getValueCols();

+        List<ExprNodeDesc> partitionCols = desc.getPartitionCols();

+

+        /**

+         * rewrite key, value, and partition-column expressions

+         */

+        for (ExprNodeDesc key : keys)

+            t.rewriteExpression(key);

+        for (ExprNodeDesc value : values)

+            t.rewriteExpression(value);

+        for (ExprNodeDesc col : partitionCols)

+            t.rewriteExpression(col);

+

+        ILogicalOperator currentOperator = null;

+

+        // add assign operator for keys if necessary

+        ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

+        ILogicalOperator assignOperator = t.getAssignOperator(parentOperator, keys, keyVariables);

+        if (assignOperator != null) {

+            currentOperator = assignOperator;

+            parentOperator = new MutableObject<ILogicalOperator>(currentOperator);

+        }

+

+        // add assign operator for values if necessary

+        ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+        assignOperator = t.getAssignOperator(parentOperator, values, variables);

+        if (assignOperator != null) {

+            currentOperator = assignOperator;

+            parentOperator = new MutableObject<ILogicalOperator>(currentOperator);

+        }

+

+        // unified schema: key, value

+        ArrayList<LogicalVariable> unifiedKeyValues = new ArrayList<LogicalVariable>();

+        unifiedKeyValues.addAll(keyVariables);

+        for (LogicalVariable value : variables)

+            if (keyVariables.indexOf(value) < 0)

+                unifiedKeyValues.add(value);

+

+        // insert a projection operator; this is required because in hive
+        // the reduce sink sometimes also performs the projection

+        currentOperator = new ProjectOperator(unifiedKeyValues);

+        currentOperator.getInputs().add(parentOperator);

+        parentOperator = new MutableObject<ILogicalOperator>(currentOperator);

+

+        reduceSinkToKeyVariables.put(operator, keyVariables);

+        List<String> fieldNames = new ArrayList<String>();

+        List<TypeInfo> types = new ArrayList<TypeInfo>();

+        for (LogicalVariable var : unifiedKeyValues) {

+            fieldNames.add(var.toString());

+            types.add(t.getType(var));

+        }

+        reduceSinkToFieldNames.put(operator, fieldNames);

+        reduceSinkToTypes.put(operator, types);

+        t.rewriteOperatorOutputSchema(variables, operator);

+

+        latestAlgebricksOperator = currentOperator;

+        latestReduceSink = operator;

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

+

+    /**

+     * partial rewrite a join operator

+     * 

+     * @param operator

+     * @param t

+     */

+    private void translateJoinOperatorPreprocess(Operator operator, Translator t) {
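+        // record the most recently visited reduce sink and its translated operator as one input branch of the pending join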

+        JoinDesc desc = (JoinDesc) operator.getConf();

+        ReduceSinkDesc reduceSinkDesc = (ReduceSinkDesc) latestReduceSink.getConf();

+        int tag = reduceSinkDesc.getTag();

+

+        Map<Byte, List<ExprNodeDesc>> exprMap = desc.getExprs();

+        List<ExprNodeDesc> exprs = exprMap.get(Byte.valueOf((byte) tag));

+

+        for (ExprNodeDesc expr : exprs)

+            t.rewriteExpression(expr);

+

+        List<Operator> parents = operatorToHiveParents.get(operator);

+        if (parents == null) {

+            parents = new ArrayList<Operator>();

+            operatorToHiveParents.put(operator, parents);

+        }

+        parents.add(latestReduceSink);

+

+        List<ILogicalOperator> asterixParents = operatorToAsterixParents.get(operator);

+        if (asterixParents == null) {

+            asterixParents = new ArrayList<ILogicalOperator>();

+            operatorToAsterixParents.put(operator, asterixParents);

+        }

+        asterixParents.add(latestAlgebricksOperator);

+    }

+

+    // generate a join tree from a list of exchange/reduce-sink operators;

+    // both exchanges and reduce sinks have the same order

+    private ILogicalOperator generateJoinTree(List<JoinCondDesc> conds, List<ILogicalOperator> exchanges,

+            List<Operator> reduceSinks, int offset, Translator t) {

+        // get a list of reduce sink descs (input descs)

+        int inputSize = reduceSinks.size() - offset;

+

+        if (inputSize == 2) {
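+            // base case: exactly two inputs remain, so build a single binary join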

+            ILogicalOperator currentRoot;

+

+            List<ReduceSinkDesc> reduceSinkDescs = new ArrayList<ReduceSinkDesc>();

+            for (int i = reduceSinks.size() - 1; i >= offset; i--)

+                reduceSinkDescs.add((ReduceSinkDesc) reduceSinks.get(i).getConf());

+

+            // get the object inspector for the join

+            List<String> fieldNames = new ArrayList<String>();

+            List<TypeInfo> types = new ArrayList<TypeInfo>();

+            for (int i = reduceSinks.size() - 1; i >= offset; i--) {

+                fieldNames.addAll(reduceSinkToFieldNames.get(reduceSinks.get(i)));

+                types.addAll(reduceSinkToTypes.get(reduceSinks.get(i)));

+            }

+

+            // get number of equality conjunctions in the final join condition

+            int size = reduceSinkDescs.get(0).getKeyCols().size();

+

+            // make up the join condition expression

+            List<ExprNodeDesc> joinConditionChildren = new ArrayList<ExprNodeDesc>();

+            for (int i = 0; i < size; i++) {

+                // create a join key pair

+                List<ExprNodeDesc> keyPair = new ArrayList<ExprNodeDesc>();

+                for (ReduceSinkDesc sink : reduceSinkDescs) {

+                    keyPair.add(sink.getKeyCols().get(i));

+                }

+                // create a hive equal condition

+                ExprNodeDesc equality = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,

+                        new GenericUDFOPEqual(), keyPair);

+                // add the equal condition to the conjunction list

+                joinConditionChildren.add(equality);

+            }

+            // get final conjunction expression

+            ExprNodeDesc conjunct = null;

+

+            if (joinConditionChildren.size() > 1)

+                conjunct = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, new GenericUDFOPAnd(),

+                        joinConditionChildren);

+            else if (joinConditionChildren.size() == 1)

+                conjunct = joinConditionChildren.get(0);

+            else {

+                // no join equality condition: fall back to a constant-true
+                // predicate (a cross product)
+                conjunct = new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, Boolean.TRUE);

+            }

+            // get an ILogicalExpression from hive's expression

+            Mutable<ILogicalExpression> expression = t.translateScalarFucntion(conjunct);

+

+            Mutable<ILogicalOperator> leftBranch = new MutableObject<ILogicalOperator>(

+                    exchanges.get(exchanges.size() - 1));

+            Mutable<ILogicalOperator> rightBranch = new MutableObject<ILogicalOperator>(

+                    exchanges.get(exchanges.size() - 2));

+            // get the join operator
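+            // note: the two branch references above are in reverse tag order, so a LEFT outer join swaps them back while a RIGHT outer join reuses the reversal with a left-outer-join operator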

+            if (conds.get(offset).getType() == JoinDesc.LEFT_OUTER_JOIN) {

+                currentRoot = new LeftOuterJoinOperator(expression);

+                Mutable<ILogicalOperator> temp = leftBranch;

+                leftBranch = rightBranch;

+                rightBranch = temp;

+            } else if (conds.get(offset).getType() == JoinDesc.RIGHT_OUTER_JOIN) {

+                currentRoot = new LeftOuterJoinOperator(expression);

+            } else

+                currentRoot = new InnerJoinOperator(expression);

+

+            currentRoot.getInputs().add(leftBranch);

+            currentRoot.getInputs().add(rightBranch);

+

+            return currentRoot;

+        } else {
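+            // recursive case: join the remaining inputs first, then join that subtree with the input at 'offset'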

+            // get the child join operator and insert a one-to-one exchange

+            ILogicalOperator joinSrcOne = generateJoinTree(conds, exchanges, reduceSinks, offset + 1, t);

+

+            ILogicalOperator currentRoot;

+

+            List<ReduceSinkDesc> reduceSinkDescs = new ArrayList<ReduceSinkDesc>();

+            for (int i = offset; i < offset + 2; i++)

+                reduceSinkDescs.add((ReduceSinkDesc) reduceSinks.get(i).getConf());

+

+            // get the object inspector for the join

+            List<String> fieldNames = new ArrayList<String>();

+            List<TypeInfo> types = new ArrayList<TypeInfo>();

+            for (int i = offset; i < reduceSinks.size(); i++) {

+                fieldNames.addAll(reduceSinkToFieldNames.get(reduceSinks.get(i)));

+                types.addAll(reduceSinkToTypes.get(reduceSinks.get(i)));

+            }

+

+            // get number of equality conjunctions in the final join condition

+            int size = reduceSinkDescs.get(0).getKeyCols().size();

+

+            // make up the join condition expression

+            List<ExprNodeDesc> joinConditionChildren = new ArrayList<ExprNodeDesc>();

+            for (int i = 0; i < size; i++) {

+                // create a join key pair

+                List<ExprNodeDesc> keyPair = new ArrayList<ExprNodeDesc>();

+                for (ReduceSinkDesc sink : reduceSinkDescs) {

+                    keyPair.add(sink.getKeyCols().get(i));

+                }

+                // create a hive equal condition

+                ExprNodeDesc equality = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,

+                        new GenericUDFOPEqual(), keyPair);

+                // add the equal condition to the conjunction list

+                joinConditionChildren.add(equality);

+            }

+            // get final conjunction expression

+            ExprNodeDesc conjunct = null;

+

+            if (joinConditionChildren.size() > 1)

+                conjunct = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, new GenericUDFOPAnd(),

+                        joinConditionChildren);

+            else if (joinConditionChildren.size() == 1)

+                conjunct = joinConditionChildren.get(0);

+            else {

+                // no join equality condition: fall back to a constant-true
+                // predicate (a cross product)
+                conjunct = new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, Boolean.TRUE);

+            }

+            // get an ILogicalExpression from hive's expression

+            Mutable<ILogicalExpression> expression = t.translateScalarFucntion(conjunct);

+

+            Mutable<ILogicalOperator> leftBranch = new MutableObject<ILogicalOperator>(joinSrcOne);

+            Mutable<ILogicalOperator> rightBranch = new MutableObject<ILogicalOperator>(exchanges.get(offset));

+

+            // get the join operator

+            if (conds.get(offset).getType() == JoinDesc.LEFT_OUTER_JOIN) {

+                currentRoot = new LeftOuterJoinOperator(expression);

+                Mutable<ILogicalOperator> temp = leftBranch;

+                leftBranch = rightBranch;

+                rightBranch = temp;

+            } else if (conds.get(offset).getType() == JoinDesc.RIGHT_OUTER_JOIN) {

+                currentRoot = new LeftOuterJoinOperator(expression);

+            } else

+                currentRoot = new InnerJoinOperator(expression);

+

+            // set the inputs of the Algebricks join operator

+            // add the current table

+            currentRoot.getInputs().add(leftBranch);

+            currentRoot.getInputs().add(rightBranch);

+

+            return currentRoot;

+        }

+    }

+}
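
Note: the condition-building loop above ANDs together one equality predicate per join-key pair. A minimal standalone sketch of the same construction (the class and method names are illustrative, not part of the patch; the Hive expression APIs are the ones the patch itself calls):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class JoinConditionSketch {
        // builds (l0 = r0) AND (l1 = r1) AND ...; a single pair needs no AND wrapper
        public static ExprNodeDesc conjunction(List<ExprNodeDesc> leftKeys, List<ExprNodeDesc> rightKeys) {
            // assumes at least one key pair; the visitor above falls back to constant TRUE otherwise
            List<ExprNodeDesc> conjuncts = new ArrayList<ExprNodeDesc>();
            for (int i = 0; i < leftKeys.size(); i++) {
                conjuncts.add(new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
                        new GenericUDFOPEqual(), Arrays.asList(leftKeys.get(i), rightKeys.get(i))));
            }
            return conjuncts.size() == 1 ? conjuncts.get(0) : new ExprNodeGenericFuncDesc(
                    TypeInfoFactory.booleanTypeInfo, new GenericUDFOPAnd(), conjuncts);
        }
    }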

diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LateralViewJoinVisitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LateralViewJoinVisitor.java
new file mode 100644
index 0000000..cc19364
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LateralViewJoinVisitor.java
@@ -0,0 +1,110 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.ArrayList;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.exec.UDTFOperator;

+import org.apache.hadoop.hive.ql.plan.UDTFDesc;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

+

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;

+

+/**

+ * The lateral view join operator is used for FROM src LATERAL VIEW udtf()...

+ * This operator was implemented with the following operator DAG in mind.

+ * For a query such as

+ * SELECT pageid, adid.* FROM example_table LATERAL VIEW explode(adid_list) AS

+ * adid

+ * The top of the operator DAG will look similar to
+ *
+ *             [Table Scan]
+ *                  |
+ *         [Lateral View Forward]
+ *               /     \
+ *    [Select](*)       [Select](adid_list)
+ *              |        |
+ *              |       [UDTF] (explode)
+ *               \     /
+ *         [Lateral View Join]
+ *                  |
+ *                  |
+ *       [Select] (pageid, adid.*)
+ *                  |
+ *                 ....

+ * Rows from the table scan operator are first sent to a lateral view forward

+ * operator that just forwards the row and marks the start of a lateral view. The select

+ * operator on the left picks all the columns while the select operator on the

+ * right picks only the columns needed by the UDTF.

+ * The output of select in the left branch and output of the UDTF in the right

+ * branch are then sent to the lateral view join (LVJ). In most cases, the UDTF

+ * will generate > 1 row for every row received from the TS, while the left

+ * select operator will generate only one. For each row output from the TS, the

+ * LVJ outputs all possible rows that can be created by joining the row from the

+ * left select and one of the rows output from the UDTF.

+ * Additional lateral views can be supported by adding a similar DAG after the

+ * previous LVJ operator.

+ */

+

+@SuppressWarnings("rawtypes")

+public class LateralViewJoinVisitor extends DefaultVisitor {

+

+    private UDTFDesc udtf;

+

+    private List<Mutable<ILogicalOperator>> parents = new ArrayList<Mutable<ILogicalOperator>>();

+

+    @Override

+    public Mutable<ILogicalOperator> visit(LateralViewJoinOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException {

+

+        parents.add(AlgebricksParentOperatorRef);

+        if (operator.getParentOperators().size() > parents.size()) {

+            return null;

+        }

+

+        Operator parent0 = operator.getParentOperators().get(0);

+        ILogicalOperator parentOperator;

+        ILogicalExpression unnestArg;

+        if (parent0 instanceof UDTFOperator) {

+            List<LogicalVariable> unnestVars = new ArrayList<LogicalVariable>();

+            VariableUtilities.getLiveVariables(parents.get(1).getValue(), unnestVars);

+            unnestArg = new VariableReferenceExpression(unnestVars.get(0));

+            parentOperator = parents.get(1).getValue();

+        } else {

+            List<LogicalVariable> unnestVars = new ArrayList<LogicalVariable>();

+            VariableUtilities.getLiveVariables(parents.get(0).getValue(), unnestVars);

+            unnestArg = new VariableReferenceExpression(unnestVars.get(0));

+            parentOperator = parents.get(0).getValue();

+        }

+

+        LogicalVariable var = t.getVariable(udtf.toString(), TypeInfoFactory.unknownTypeInfo);

+

+        Mutable<ILogicalExpression> unnestExpr = t.translateUnnestFunction(udtf, new MutableObject<ILogicalExpression>(

+                unnestArg));

+        ILogicalOperator currentOperator = new UnnestOperator(var, unnestExpr);

+

+        List<LogicalVariable> outputVars = new ArrayList<LogicalVariable>();

+        VariableUtilities.getLiveVariables(parentOperator, outputVars);

+        outputVars.add(var);

+        currentOperator.getInputs().add(new MutableObject<ILogicalOperator>(parentOperator));

+

+        parents.clear();

+        udtf = null;

+        t.rewriteOperatorOutputSchema(outputVars, operator);

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(UDTFOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+        Schema currentSchema = t.generateInputSchema(operator.getParentOperators().get(0));

+        udtf = (UDTFDesc) operator.getConf();

+

+        // populate the schema from upstream operator

+        operator.setSchema(operator.getParentOperators().get(0).getSchema());

+        List<LogicalVariable> latestOutputSchema = t.getVariablesFromSchema(currentSchema);

+        t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

+        return null;

+    }

+

+}
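
Note: the net effect of the visitor above is that a query such as SELECT pageid, adid FROM example_table LATERAL VIEW explode(adid_list) AS adid collapses into a single Algebricks unnest stacked on the non-UDTF parent. A minimal sketch of that operator shape (illustrative names; the constructors mirror the calls in the visitor):

    import org.apache.commons.lang3.mutable.Mutable;
    import org.apache.commons.lang3.mutable.MutableObject;

    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;

    public class UnnestShapeSketch {
        // parent: the translated non-UDTF branch; unnestExpr: the translated UDTF call
        public static ILogicalOperator unnestOver(ILogicalOperator parent, LogicalVariable outVar,
                Mutable<ILogicalExpression> unnestExpr) {
            UnnestOperator unnest = new UnnestOperator(outVar, unnestExpr);
            // one input: the branch that feeds rows into the table function
            unnest.getInputs().add(new MutableObject<ILogicalOperator>(parent));
            return unnest;
        }
    }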

diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LimitVisitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LimitVisitor.java
new file mode 100644
index 0000000..cc10f8f
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/LimitVisitor.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.LimitOperator;

+import org.apache.hadoop.hive.ql.plan.LimitDesc;

+

+import edu.uci.ics.hivesterix.logical.expression.HivesterixConstantValue;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;

+

+public class LimitVisitor extends DefaultVisitor {

+

+    @Override

+    public Mutable<ILogicalOperator> visit(LimitOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+        Schema currentSchema = t.generateInputSchema(operator.getParentOperators().get(0));

+

+        LimitDesc desc = (LimitDesc) operator.getConf();

+        int limit = desc.getLimit();

+        Integer limitValue = Integer.valueOf(limit);

+

+        ILogicalExpression expr = new ConstantExpression(new HivesterixConstantValue(limitValue));

+        ILogicalOperator currentOperator = new edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator(

+                expr, true);

+        currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+

+        operator.setSchema(operator.getParentOperators().get(0).getSchema());

+        List<LogicalVariable> latestOutputSchema = t.getVariablesFromSchema(currentSchema);

+        t.rewriteOperatorOutputSchema(latestOutputSchema, operator);

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

+

+}
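
Note: for, say, SELECT * FROM t LIMIT 10, the visitor above yields an Algebricks LimitOperator whose max-objects expression is the constant 10. A minimal sketch (illustrative names; the boolean argument mirrors the `true` passed above, which this patch appears to use to mark a top-level limit):

    import org.apache.commons.lang3.mutable.MutableObject;

    import edu.uci.ics.hivesterix.logical.expression.HivesterixConstantValue;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;

    public class LimitSketch {
        public static ILogicalOperator limit(ILogicalOperator parent, int n) {
            // the limit count travels as a constant expression, not as operator state
            ILogicalExpression max = new ConstantExpression(new HivesterixConstantValue(Integer.valueOf(n)));
            LimitOperator op = new LimitOperator(max, true);
            op.getInputs().add(new MutableObject<ILogicalOperator>(parent));
            return op;
        }
    }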

diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/MapJoinVisitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/MapJoinVisitor.java
new file mode 100644
index 0000000..4aba6a4
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/MapJoinVisitor.java
@@ -0,0 +1,171 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.io.Serializable;

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.Iterator;

+import java.util.List;

+import java.util.Map;

+import java.util.Map.Entry;

+import java.util.Set;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.ColumnInfo;

+import org.apache.hadoop.hive.ql.exec.MapJoinOperator;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;

+import org.apache.hadoop.hive.ql.plan.MapJoinDesc;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;

+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

+

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;

+

+@SuppressWarnings("rawtypes")

+public class MapJoinVisitor extends DefaultVisitor {

+

+    /**

+     * map a join operator (in Hive) to its parent operators (in Algebricks)

+     */

+    private HashMap<Operator, List<Mutable<ILogicalOperator>>> opMap = new HashMap<Operator, List<Mutable<ILogicalOperator>>>();

+

+    @Override

+    public Mutable<ILogicalOperator> visit(MapJoinOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {

+        List<Operator<? extends Serializable>> joinSrc = operator.getParentOperators();

+        List<Mutable<ILogicalOperator>> parents = opMap.get(operator);

+        if (parents == null) {

+            parents = new ArrayList<Mutable<ILogicalOperator>>();

+            opMap.put(operator, parents);

+        }

+        parents.add(AlgebricksParentOperatorRef);

+        if (joinSrc.size() != parents.size())

+            return null;

+

+        ILogicalOperator currentOperator;

+        // make a map join operator

+        // TODO: will have trouble for n-way joins

+        MapJoinDesc joinDesc = (MapJoinDesc) operator.getConf();

+

+        Map<Byte, List<ExprNodeDesc>> keyMap = joinDesc.getKeys();

+        // get the projection expression (already re-written) from each source

+        // table

+        Map<Byte, List<ExprNodeDesc>> exprMap = joinDesc.getExprs();

+

+        int inputSize = operator.getParentOperators().size();

+        // get a list of reduce sink descs (input descs)

+

+        // get the parent operator

+        List<Mutable<ILogicalOperator>> parentOps = parents;

+

+        List<String> fieldNames = new ArrayList<String>();

+        List<TypeInfo> types = new ArrayList<TypeInfo>();

+        for (Operator ts : joinSrc) {

+            List<ColumnInfo> columns = ts.getSchema().getSignature();

+            for (ColumnInfo col : columns) {

+                fieldNames.add(col.getInternalName());

+                types.add(col.getType());

+            }

+        }

+

+        // get number of equality conjunctions in the final join condition

+        Set<Entry<Byte, List<ExprNodeDesc>>> keyEntries = keyMap.entrySet();

+        Iterator<Entry<Byte, List<ExprNodeDesc>>> entry = keyEntries.iterator();

+

+        int size = 0;

+        if (entry.hasNext())

+            size = entry.next().getValue().size();

+

+        // make up the join condition expression

+        List<ExprNodeDesc> joinConditionChildren = new ArrayList<ExprNodeDesc>();

+        for (int i = 0; i < size; i++) {

+            // create a join key pair

+            List<ExprNodeDesc> keyPair = new ArrayList<ExprNodeDesc>();

+            for (int j = 0; j < inputSize; j++) {

+                keyPair.add(keyMap.get(Byte.valueOf((byte) j)).get(i));

+            }

+            // create a hive equal condition

+            ExprNodeDesc equality = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,

+                    new GenericUDFOPEqual(), keyPair);

+            // add the equal condition to the conjunction list

+            joinConditionChildren.add(equality);

+        }

+        // get final conjunction expression

+        ExprNodeDesc conjunct = null;

+

+        if (joinConditionChildren.size() > 1)

+            conjunct = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, new GenericUDFOPAnd(),

+                    joinConditionChildren);

+        else if (joinConditionChildren.size() == 1)

+            conjunct = joinConditionChildren.get(0);

+        else {

+            // no join equality condition: fall back to constant TRUE as the join predicate

+            conjunct = new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, Boolean.TRUE);

+        }

+        // get an ILogicalExpression from hive's expression

+        Mutable<ILogicalExpression> expression = t.translateScalarFucntion(conjunct);

+

+        ArrayList<LogicalVariable> left = new ArrayList<LogicalVariable>();

+        ArrayList<LogicalVariable> right = new ArrayList<LogicalVariable>();

+

+        Set<Entry<Byte, List<ExprNodeDesc>>> kentries = keyMap.entrySet();

+        Iterator<Entry<Byte, List<ExprNodeDesc>>> kiterator = kentries.iterator();

+        int iteration = 0;

+        ILogicalOperator assignOperator = null;

+        while (kiterator.hasNext()) {

+            List<ExprNodeDesc> outputExprs = kiterator.next().getValue();

+

+            if (iteration == 0)

+                assignOperator = t.getAssignOperator(AlgebricksParentOperatorRef, outputExprs, left);

+            else

+                assignOperator = t.getAssignOperator(AlgebricksParentOperatorRef, outputExprs, right);

+

+            if (assignOperator != null) {

+                currentOperator = assignOperator;

+                AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

+            }

+            iteration++;

+        }

+

+        List<Mutable<ILogicalOperator>> inputs = parentOps;

+

+        // get the join operator

+        currentOperator = new InnerJoinOperator(expression);

+

+        // set the inputs of the Algebricks join operator

+        for (Mutable<ILogicalOperator> input : inputs)

+            currentOperator.getInputs().add(input);

+        AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

+

+        // add assign and project operator

+        // output variables

+        ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+        Set<Entry<Byte, List<ExprNodeDesc>>> entries = exprMap.entrySet();

+        Iterator<Entry<Byte, List<ExprNodeDesc>>> iterator = entries.iterator();

+        while (iterator.hasNext()) {

+            List<ExprNodeDesc> outputExprs = iterator.next().getValue();

+            assignOperator = t.getAssignOperator(AlgebricksParentOperatorRef, outputExprs, variables);

+

+            if (assignOperator != null) {

+                currentOperator = assignOperator;

+                AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

+            }

+        }

+

+        currentOperator = new ProjectOperator(variables);

+        currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+        t.rewriteOperatorOutputSchema(variables, operator);

+        // opMap.clear();

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

+}
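
Note: Hive's MapJoinDesc keys each join input by its byte position. A minimal sketch of pulling aligned key pairs out of such a map for a two-way join (positions 0 and 1), matching the inner loop above; the class and method names are illustrative:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

    public class MapJoinKeySketch {
        // returns one [leftKey_i, rightKey_i] pair per equality conjunct
        public static List<List<ExprNodeDesc>> alignedKeyPairs(Map<Byte, List<ExprNodeDesc>> keyMap) {
            List<ExprNodeDesc> left = keyMap.get(Byte.valueOf((byte) 0));
            List<ExprNodeDesc> right = keyMap.get(Byte.valueOf((byte) 1));
            List<List<ExprNodeDesc>> pairs = new ArrayList<List<ExprNodeDesc>>();
            for (int i = 0; i < left.size(); i++) {
                List<ExprNodeDesc> pair = new ArrayList<ExprNodeDesc>();
                pair.add(left.get(i));
                pair.add(right.get(i));
                pairs.add(pair);
            }
            return pairs;
        }
    }

For an n-way map join the same map holds one key list per position, which is why the TODO above flags the two-branch assign logic as fragile beyond two inputs.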

diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ProjectVisitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ProjectVisitor.java
new file mode 100644
index 0000000..eb0922f
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ProjectVisitor.java
@@ -0,0 +1,56 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.ArrayList;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.SelectOperator;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.SelectDesc;

+

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;

+

+public class ProjectVisitor extends DefaultVisitor {

+

+    /**

+     * translate project operator

+     */

+    @Override

+    public Mutable<ILogicalOperator> visit(SelectOperator operator, Mutable<ILogicalOperator> AlgebricksParentOperator,

+            Translator t) {

+

+        SelectDesc desc = (SelectDesc) operator.getConf();

+

+        if (desc == null)

+            return null;

+

+        List<ExprNodeDesc> cols = desc.getColList();

+

+        if (cols == null)

+            return null;

+

+        // insert assign operator if necessary

+        ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+

+        for (ExprNodeDesc expr : cols)

+            t.rewriteExpression(expr);

+

+        ILogicalOperator assignOp = t.getAssignOperator(AlgebricksParentOperator, cols, variables);

+        ILogicalOperator currentOperator = null;

+        if (assignOp != null) {

+            currentOperator = assignOp;

+            AlgebricksParentOperator = new MutableObject<ILogicalOperator>(currentOperator);

+        }

+

+        currentOperator = new ProjectOperator(variables);

+        currentOperator.getInputs().add(AlgebricksParentOperator);

+        t.rewriteOperatorOutputSchema(variables, operator);

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

+

+}
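
Note: the assign-then-project shape above is the standard pattern in these visitors: an optional assign materializes computed columns, then a project narrows the schema to exactly the listed variables. A minimal sketch of the projection step (illustrative names; constructor usage mirrors the visitor):

    import java.util.ArrayList;

    import org.apache.commons.lang3.mutable.Mutable;

    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;

    public class ProjectShapeSketch {
        // a project keeps exactly the listed variables, in the listed order
        public static ILogicalOperator project(Mutable<ILogicalOperator> parent, ArrayList<LogicalVariable> vars) {
            ProjectOperator proj = new ProjectOperator(vars);
            proj.getInputs().add(parent);
            return proj;
        }
    }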

diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/SortVisitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/SortVisitor.java
new file mode 100644
index 0000000..325b632
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/SortVisitor.java
@@ -0,0 +1,113 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.ArrayList;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.ExtractOperator;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;

+

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;

+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.OrderColumn;

+

+public class SortVisitor extends DefaultVisitor {

+

+    @SuppressWarnings("rawtypes")

+    @Override

+    public Mutable<ILogicalOperator> visit(ReduceSinkOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException {

+        ReduceSinkDesc desc = (ReduceSinkDesc) operator.getConf();

+        Operator downStream = (Operator) operator.getChildOperators().get(0);

+        List<ExprNodeDesc> keys = desc.getKeyCols();

+        if (!(downStream instanceof ExtractOperator && desc.getNumReducers() == 1 && keys.size() > 0)) {

+            return null;

+        }

+

+        List<ExprNodeDesc> schema = new ArrayList<ExprNodeDesc>();

+        List<ExprNodeDesc> values = desc.getValueCols();

+        List<ExprNodeDesc> partitionCols = desc.getPartitionCols();

+        for (ExprNodeDesc key : keys) {

+            t.rewriteExpression(key);

+        }

+        for (ExprNodeDesc value : values) {

+            t.rewriteExpression(value);

+        }

+        for (ExprNodeDesc col : partitionCols) {

+            t.rewriteExpression(col);

+        }

+

+        // add an order-by operator (and a limit, if any)

+        List<Pair<IOrder, Mutable<ILogicalExpression>>> pairs = new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>();

+        char[] orders = desc.getOrder().toCharArray();

+        int i = 0;

+        for (ExprNodeDesc key : keys) {

+            Mutable<ILogicalExpression> expr = t.translateScalarFucntion(key);

+            IOrder order = orders[i] == '+' ? OrderOperator.ASC_ORDER : OrderOperator.DESC_ORDER;

+

+            Pair<IOrder, Mutable<ILogicalExpression>> pair = new Pair<IOrder, Mutable<ILogicalExpression>>(order, expr);

+            pairs.add(pair);

+            i++;

+        }

+

+        // get input variables

+        ArrayList<LogicalVariable> inputVariables = new ArrayList<LogicalVariable>();

+        VariableUtilities.getProducedVariables(AlgebricksParentOperatorRef.getValue(), inputVariables);

+

+        ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();

+        ILogicalOperator currentOperator;

+        ILogicalOperator assignOp = t.getAssignOperator(AlgebricksParentOperatorRef, keys, keyVariables);

+        if (assignOp != null) {

+            currentOperator = assignOp;

+            AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

+        }

+

+        OrderColumn[] keyColumns = new OrderColumn[keyVariables.size()];

+

+        for (int j = 0; j < keyColumns.length; j++)

+            keyColumns[j] = new OrderColumn(keyVariables.get(j), pairs.get(j).first.getKind());

+

+        // handle order operator

+        currentOperator = new OrderOperator(pairs);

+        currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+        AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

+

+        // project back, remove generated sort-key columns if any

+        if (assignOp != null) {

+            currentOperator = new ProjectOperator(inputVariables);

+            currentOperator.getInputs().add(AlgebricksParentOperatorRef);

+            AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

+        }

+

+        /**

+         * a special rule for Hive's order-by: the output schema of the reduce

+         * sink operator only contains the value columns

+         */

+        for (ExprNodeDesc value : values) {

+            schema.add(value);

+        }

+

+        ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+        ILogicalOperator assignOperator = t.getAssignOperator(AlgebricksParentOperatorRef, schema, variables);

+        t.rewriteOperatorOutputSchema(variables, operator);

+

+        if (assignOperator != null) {

+            currentOperator = assignOperator;

+            AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);

+        }

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

+}
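
Note: Hive encodes ORDER BY directions as a string such as "+-+", one character per sort key, which the loop above zips with the key expressions. A minimal sketch of the direction decoding on its own (illustrative names; the IOrder constants are the ones used above):

    import java.util.ArrayList;
    import java.util.List;

    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;

    public class OrderDirectionSketch {
        // '+' means ascending; anything else (i.e. '-') descending, as in the visitor above
        public static List<IOrder> decode(String order) {
            List<IOrder> result = new ArrayList<IOrder>();
            for (char c : order.toCharArray()) {
                result.add(c == '+' ? OrderOperator.ASC_ORDER : OrderOperator.DESC_ORDER);
            }
            return result;
        }
    }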

diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/TableScanWriteVisitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/TableScanWriteVisitor.java
new file mode 100644
index 0000000..3af1832
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/TableScanWriteVisitor.java
@@ -0,0 +1,135 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.ArrayList;

+import java.util.HashMap;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.ColumnInfo;

+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;

+import org.apache.hadoop.hive.ql.exec.TableScanOperator;

+import org.apache.hadoop.hive.ql.plan.PartitionDesc;

+import org.apache.hadoop.hive.ql.plan.TableScanDesc;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hivesterix.runtime.jobgen.HiveDataSink;

+import edu.uci.ics.hivesterix.runtime.jobgen.HiveDataSource;

+import edu.uci.ics.hivesterix.runtime.jobgen.HiveMetaDataProvider;

+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSink;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;

+

+public class TableScanWriteVisitor extends DefaultVisitor {

+

+    /**

+     * map from alias to partition desc

+     */

+    private HashMap<String, PartitionDesc> aliasToPathMap;

+

+    /**

+     * map from partition desc to data source

+     */

+    private HashMap<PartitionDesc, IDataSource<PartitionDesc>> dataSourceMap = new HashMap<PartitionDesc, IDataSource<PartitionDesc>>();

+

+    /**

+     * constructor

+     * 

+     * @param aliasToPathMap

+     */

+    public TableScanWriteVisitor(HashMap<String, PartitionDesc> aliasToPathMap) {

+        this.aliasToPathMap = aliasToPathMap;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(TableScanOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        TableScanDesc desc = (TableScanDesc) operator.getConf();

+        if (desc == null) {

+            List<LogicalVariable> schema = new ArrayList<LogicalVariable>();

+            VariableUtilities.getLiveVariables(AlgebricksParentOperator.getValue(), schema);

+            t.rewriteOperatorOutputSchema(schema, operator);

+            return null;

+        }

+

+        List<ColumnInfo> columns = operator.getSchema().getSignature();

+        for (int i = columns.size() - 1; i >= 0; i--)

+            if (columns.get(i).getIsVirtualCol())

+                columns.remove(i);

+

+        // collect column names, types, and logical variables for the scan

+        List<TypeInfo> types = new ArrayList<TypeInfo>();

+        ArrayList<LogicalVariable> variables = new ArrayList<LogicalVariable>();

+        List<String> names = new ArrayList<String>();

+        for (ColumnInfo column : columns) {

+            types.add(column.getType());

+

+            LogicalVariable var = t.getVariableFromFieldName(column.getTabAlias() + "." + column.getInternalName());

+            LogicalVariable varNew;

+

+            if (var != null) {

+                varNew = t.getVariable(column.getTabAlias() + "." + column.getInternalName() + operator.toString(),

+                        column.getType());

+                t.replaceVariable(var, varNew);

+                var = varNew;

+            } else

+                var = t.getNewVariable(column.getTabAlias() + "." + column.getInternalName(), column.getType());

+

+            variables.add(var);

+            names.add(column.getInternalName());

+        }

+        Schema currentSchema = new Schema(names, types);

+

+        String alias = desc.getAlias();

+        PartitionDesc partDesc = aliasToPathMap.get(alias);

+        IDataSource<PartitionDesc> dataSource = new HiveDataSource<PartitionDesc>(partDesc, currentSchema.getSchema());

+        ILogicalOperator currentOperator = new DataSourceScanOperator(variables, dataSource);

+

+        // set empty tuple source operator

+        ILogicalOperator ets = new EmptyTupleSourceOperator();

+        currentOperator.getInputs().add(new MutableObject<ILogicalOperator>(ets));

+

+        // set up the data source

+        dataSourceMap.put(partDesc, dataSource);

+        t.rewriteOperatorOutputSchema(variables, operator);

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(FileSinkOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) {

+

+        if (hiveOperator.getChildOperators() != null && hiveOperator.getChildOperators().size() > 0)

+            return null;

+

+        Schema currentSchema = t.generateInputSchema(hiveOperator.getParentOperators().get(0));

+

+        IDataSink sink = new HiveDataSink(hiveOperator, currentSchema.getSchema());

+        List<Mutable<ILogicalExpression>> exprList = new ArrayList<Mutable<ILogicalExpression>>();

+        for (String column : currentSchema.getNames()) {

+            exprList.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(t.getVariable(column))));

+        }

+

+        ILogicalOperator currentOperator = new WriteOperator(exprList, sink);

+        if (AlgebricksParentOperator != null) {

+            currentOperator.getInputs().add(AlgebricksParentOperator);

+        }

+

+        IMetadataProvider<PartitionDesc, Object> metaData = new HiveMetaDataProvider<PartitionDesc, Object>(

+                hiveOperator, currentSchema, dataSourceMap);

+        t.setMetadataProvider(metaData);

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

+}
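
Note: every translated table scan bottoms out in an EmptyTupleSourceOperator leaf, the conventional start of an Algebricks pipeline. A minimal sketch of that two-operator shape (illustrative names; I am assuming the scan constructor accepts any IDataSource, as the call above suggests):

    import java.util.List;

    import org.apache.commons.lang3.mutable.MutableObject;

    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
    import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;

    public class ScanShapeSketch {
        // one scan variable per (non-virtual) column, fed by an empty-tuple-source leaf
        public static ILogicalOperator scan(List<LogicalVariable> vars, IDataSource<?> source) {
            DataSourceScanOperator scanOp = new DataSourceScanOperator(vars, source);
            scanOp.getInputs().add(new MutableObject<ILogicalOperator>(new EmptyTupleSourceOperator()));
            return scanOp;
        }
    }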

diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/UnionVisitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/UnionVisitor.java
new file mode 100644
index 0000000..96b9463
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/UnionVisitor.java
@@ -0,0 +1,62 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor;

+

+import java.util.ArrayList;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.commons.lang3.mutable.MutableObject;

+import org.apache.hadoop.hive.ql.exec.UnionOperator;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

+

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;

+import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;

+

+public class UnionVisitor extends DefaultVisitor {

+

+    List<Mutable<ILogicalOperator>> parents = new ArrayList<Mutable<ILogicalOperator>>();

+

+    @Override

+    public Mutable<ILogicalOperator> visit(UnionOperator operator, Mutable<ILogicalOperator> AlgebricksParentOperator,

+            Translator t) throws AlgebricksException {

+

+        parents.add(AlgebricksParentOperator);

+        if (operator.getParentOperators().size() > parents.size()) {

+            return null;

+        }

+

+        List<LogicalVariable> leftVars = new ArrayList<LogicalVariable>();

+        List<LogicalVariable> rightVars = new ArrayList<LogicalVariable>();

+

+        VariableUtilities.getUsedVariables(parents.get(0).getValue(), leftVars);

+        VariableUtilities.getUsedVariables(parents.get(1).getValue(), rightVars);

+

+        List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> triples = new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>();

+        List<LogicalVariable> unionVars = new ArrayList<LogicalVariable>();

+

+        for (int i = 0; i < leftVars.size(); i++) {

+            LogicalVariable unionVar = t.getVariable(

+                    leftVars.get(i).getId() + "union" + AlgebricksParentOperator.hashCode(),

+                    TypeInfoFactory.unknownTypeInfo);

+            unionVars.add(unionVar);

+            Triple<LogicalVariable, LogicalVariable, LogicalVariable> triple = new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(

+                    leftVars.get(i), rightVars.get(i), unionVar);

+            t.replaceVariable(leftVars.get(i), unionVar);

+            t.replaceVariable(rightVars.get(i), unionVar);

+            triples.add(triple);

+        }

+        ILogicalOperator currentOperator = new edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator(

+                triples);

+        for (Mutable<ILogicalOperator> parent : parents)

+            currentOperator.getInputs().add(parent);

+

+        t.rewriteOperatorOutputSchema(unionVars, operator);

+        parents.clear();

+        return new MutableObject<ILogicalOperator>(currentOperator);

+    }

+

+}
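
Note: each Triple built above is (left-branch variable, right-branch variable, output variable of the union), matched positionally across the two inputs. A minimal sketch of the alignment (illustrative names):

    import java.util.ArrayList;
    import java.util.List;

    import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

    public class UnionTripleSketch {
        public static List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> align(
                List<LogicalVariable> left, List<LogicalVariable> right, List<LogicalVariable> out) {
            // assumes the three lists have equal length, as the visitor above does
            List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> triples =
                    new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>();
            for (int i = 0; i < left.size(); i++) {
                triples.add(new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(
                        left.get(i), right.get(i), out.get(i)));
            }
            return triples;
        }
    }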

diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/DefaultVisitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/DefaultVisitor.java
new file mode 100644
index 0000000..d298553
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/DefaultVisitor.java
@@ -0,0 +1,145 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor.base;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.hadoop.hive.ql.exec.CollectOperator;

+import org.apache.hadoop.hive.ql.exec.ExtractOperator;

+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;

+import org.apache.hadoop.hive.ql.exec.FilterOperator;

+import org.apache.hadoop.hive.ql.exec.ForwardOperator;

+import org.apache.hadoop.hive.ql.exec.GroupByOperator;

+import org.apache.hadoop.hive.ql.exec.JoinOperator;

+import org.apache.hadoop.hive.ql.exec.LateralViewForwardOperator;

+import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator;

+import org.apache.hadoop.hive.ql.exec.LimitOperator;

+import org.apache.hadoop.hive.ql.exec.MapJoinOperator;

+import org.apache.hadoop.hive.ql.exec.MapOperator;

+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;

+import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;

+import org.apache.hadoop.hive.ql.exec.ScriptOperator;

+import org.apache.hadoop.hive.ql.exec.SelectOperator;

+import org.apache.hadoop.hive.ql.exec.TableScanOperator;

+import org.apache.hadoop.hive.ql.exec.UDTFOperator;

+import org.apache.hadoop.hive.ql.exec.UnionOperator;

+

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+

+/**

+ * a default empty implementation of visitor

+ * 

+ * @author yingyib

+ */

+public class DefaultVisitor implements Visitor {

+

+    @Override

+    public Mutable<ILogicalOperator> visit(CollectOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(JoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(ExtractOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(MapJoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(SMBMapJoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    public Mutable<ILogicalOperator> visit(FileSinkOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    public Mutable<ILogicalOperator> visit(ReduceSinkOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(FilterOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(ForwardOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(GroupByOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(LateralViewForwardOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(LateralViewJoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(LimitOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(MapOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(ScriptOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(SelectOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(TableScanOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(UDTFOperator operator, Mutable<ILogicalOperator> AlgebricksParentOperator,

+            Translator t) throws AlgebricksException {

+        return null;

+    }

+

+    @Override

+    public Mutable<ILogicalOperator> visit(UnionOperator operator, Mutable<ILogicalOperator> AlgebricksParentOperator,

+            Translator t) throws AlgebricksException {

+        return null;

+    }

+}
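
Note: concrete visitors extend DefaultVisitor and override only the operators they translate; returning null signals "not handled here". A hypothetical example, not part of the patch:

    import org.apache.commons.lang3.mutable.Mutable;
    import org.apache.hadoop.hive.ql.exec.FilterOperator;

    import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;
    import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;
    import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

    public class FilterOnlyVisitor extends DefaultVisitor {
        @Override
        public Mutable<ILogicalOperator> visit(FilterOperator hiveOperator,
                Mutable<ILogicalOperator> AlgebricksParentOperator, Translator t) throws AlgebricksException {
            // ... translate the filter here; every other operator keeps the
            // inherited do-nothing behavior
            return null;
        }
    }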

diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Translator.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Translator.java
new file mode 100644
index 0000000..8aa139a
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Translator.java
@@ -0,0 +1,170 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor.base;

+

+import java.util.ArrayList;

+import java.util.List;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.hadoop.hive.ql.exec.Operator;

+import org.apache.hadoop.hive.ql.plan.AggregationDesc;

+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

+import org.apache.hadoop.hive.ql.plan.PartitionDesc;

+import org.apache.hadoop.hive.ql.plan.UDTFDesc;

+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

+

+import edu.uci.ics.hivesterix.runtime.jobgen.Schema;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;

+

+@SuppressWarnings("rawtypes")

+public interface Translator {

+

+    /**

+     * generate input schema

+     * 

+     * @param operator

+     * @return

+     */

+    public Schema generateInputSchema(Operator operator);

+

+    /**

+     * rewrite the names of output columns for future expression evaluators to

+     * use

+     * 

+     * @param operator

+     */

+    public void rewriteOperatorOutputSchema(List<LogicalVariable> vars, Operator operator);

+

+    /**

+     * rewrite the names of output columns for future expression evaluators to

+     * use

+     * 

+     * @param operator

+     */

+    public void rewriteOperatorOutputSchema(Operator operator);

+

+    /**

+     * rewrite an expression and substitute variables

+     * 

+     * @param expr

+     *            hive expression

+     */

+    public void rewriteExpression(ExprNodeDesc expr);

+

+    /**

+     * rewrite an expression and substitute variables

+     * 

+     * @param expr

+     *            hive expression

+     */

+    public void rewriteExpressionPartial(ExprNodeDesc expr);

+

+    /**

+     * get an assign operator as a child of parent

+     * 

+     * @param parent

+     * @param cols

+     * @param variables

+     * @return

+     */

+    public ILogicalOperator getAssignOperator(Mutable<ILogicalOperator> parent, List<ExprNodeDesc> cols,

+            ArrayList<LogicalVariable> variables);

+

+    /**

+     * get type for a logical variable

+     * 

+     * @param var

+     * @return type info

+     */

+    public TypeInfo getType(LogicalVariable var);

+

+    /**

+     * translate an expression from hive to Algebricks

+     * 

+     * @param desc

+     * @return

+     */

+    public Mutable<ILogicalExpression> translateScalarFucntion(ExprNodeDesc desc);

+

+    /**

+     * translate an aggregation from hive to Algebricks

+     * 

+     * @param aggregateDesc

+     * @return

+     */

+    public Mutable<ILogicalExpression> translateAggregation(AggregationDesc aggregateDesc);

+

+    /**

+     * translate unnesting (UDTF) function expression

+     * 

+     * @param udtfDesc
+     * @param argument

+     * @return

+     */

+    public Mutable<ILogicalExpression> translateUnnestFunction(UDTFDesc udtfDesc, Mutable<ILogicalExpression> argument);

+

+    /**

+     * get variables from a schema

+     * 

+     * @param schema

+     * @return

+     */

+    public List<LogicalVariable> getVariablesFromSchema(Schema schema);

+

+    /**

+     * get variable from name

+     * 

+     * @param name

+     * @return

+     */

+    public LogicalVariable getVariable(String name);

+

+    /**

+     * get variable from field name

+     * 

+     * @param name

+     * @return

+     */

+    public LogicalVariable getVariableFromFieldName(String name);

+

+    /**

+     * get variable from name, type

+     * 

+     * @param fieldName

+     * @param type

+     * @return

+     */

+    public LogicalVariable getVariable(String fieldName, TypeInfo type);

+

+    /**

+     * get new variable from name, type

+     * 

+     * @param fieldName

+     * @param type

+     * @return

+     */

+    public LogicalVariable getNewVariable(String fieldName, TypeInfo type);

+

+    /**

+     * set the metadata provider

+     * 

+     * @param metadata

+     */

+    public void setMetadataProvider(IMetadataProvider<PartitionDesc, Object> metadata);

+

+    /**

+     * get the metadata provider

+     * 

+     * @return the metadata provider

+     */

+    public IMetadataProvider<PartitionDesc, Object> getMetadataProvider();

+

+    /**

+     * replace the variable

+     * 

+     * @param oldVar

+     * @param newVar

+     */

+    public void replaceVariable(LogicalVariable oldVar, LogicalVariable newVar);

+

+}
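
Note: a recurring idiom in the visitors above is getAssignOperator followed by a null check; a null return is treated as "no assign needed", so the existing parent reference is kept. A minimal sketch of that idiom against this interface (illustrative names):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.commons.lang3.mutable.Mutable;
    import org.apache.commons.lang3.mutable.MutableObject;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

    import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

    public class AssignIdiomSketch {
        public static Mutable<ILogicalOperator> assignIfNeeded(Translator t, Mutable<ILogicalOperator> parent,
                List<ExprNodeDesc> cols) {
            ArrayList<LogicalVariable> vars = new ArrayList<LogicalVariable>();
            ILogicalOperator assign = t.getAssignOperator(parent, cols, vars);
            // null: nothing to compute, keep the existing parent as-is
            return assign == null ? parent : new MutableObject<ILogicalOperator>(assign);
        }
    }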

diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Visitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Visitor.java
new file mode 100644
index 0000000..11ae357
--- /dev/null
+++ b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/base/Visitor.java
@@ -0,0 +1,85 @@
+package edu.uci.ics.hivesterix.logical.plan.visitor.base;

+

+import org.apache.commons.lang3.mutable.Mutable;

+import org.apache.hadoop.hive.ql.exec.CollectOperator;

+import org.apache.hadoop.hive.ql.exec.ExtractOperator;

+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;

+import org.apache.hadoop.hive.ql.exec.FilterOperator;

+import org.apache.hadoop.hive.ql.exec.ForwardOperator;

+import org.apache.hadoop.hive.ql.exec.GroupByOperator;

+import org.apache.hadoop.hive.ql.exec.JoinOperator;

+import org.apache.hadoop.hive.ql.exec.LateralViewForwardOperator;

+import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator;

+import org.apache.hadoop.hive.ql.exec.LimitOperator;

+import org.apache.hadoop.hive.ql.exec.MapJoinOperator;

+import org.apache.hadoop.hive.ql.exec.MapOperator;

+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;

+import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;

+import org.apache.hadoop.hive.ql.exec.ScriptOperator;

+import org.apache.hadoop.hive.ql.exec.SelectOperator;

+import org.apache.hadoop.hive.ql.exec.TableScanOperator;

+import org.apache.hadoop.hive.ql.exec.UDTFOperator;

+import org.apache.hadoop.hive.ql.exec.UnionOperator;

+

+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;

+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;

+

+public interface Visitor {

+

+    public Mutable<ILogicalOperator> visit(CollectOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(JoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(ExtractOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(MapJoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(SMBMapJoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(FilterOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(ForwardOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(GroupByOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(LateralViewForwardOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(LateralViewJoinOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(LimitOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(MapOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(ScriptOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(SelectOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(TableScanOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(FileSinkOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(ReduceSinkOperator hiveOperator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(UDTFOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+

+    public Mutable<ILogicalOperator> visit(UnionOperator operator,

+            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException;

+}

diff --git a/hivesterix/pom.xml b/hivesterix/pom.xml
new file mode 100644
index 0000000..a8ef4ee
--- /dev/null
+++ b/hivesterix/pom.xml
@@ -0,0 +1,139 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<groupId>edu.uci.ics.hyracks</groupId>
+	<artifactId>hivesterix</artifactId>
+	<version>0.2.3-SNAPSHOT</version>
+	<packaging>pom</packaging>
+	<name>hivesterix</name>
+
+	<properties>
+		<jvm.extraargs />
+	</properties>
+
+	<profiles>
+		<profile>
+			<id>macosx</id>
+			<activation>
+				<os>
+					<name>mac os x</name>
+				</os>
+				<jdk>1.7</jdk>
+			</activation>
+			<properties>
+				<jvm.extraargs>-Djava.nio.channels.spi.SelectorProvider=sun.nio.ch.KQueueSelectorProvider</jvm.extraargs>
+			</properties>
+		</profile>
+	</profiles>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-release-plugin</artifactId>
+				<version>2.0</version>
+				<configuration>
+					<goals>package source:jar javadoc:jar deploy:deploy</goals>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.codehaus.mojo</groupId>
+				<artifactId>versions-maven-plugin</artifactId>
+				<version>1.2</version>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.13</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<argLine>-enableassertions
+						-Djava.util.logging.config.file=${user.home}/logging.properties
+						-Xdebug
+						-Xrunjdwp:transport=dt_socket,server=y,address=8000,suspend=n
+						${jvm.extraargs}</argLine>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<reporting>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-changelog-plugin</artifactId>
+				<version>2.2</version>
+			</plugin>
+		</plugins>
+	</reporting>
+
+	<distributionManagement>
+		<repository>
+			<id>hyracks-releases</id>
+			<url>http://obelix.ics.uci.edu/nexus/content/repositories/hyracks-releases/</url>
+		</repository>
+		<snapshotRepository>
+			<id>hyracks-snapshots</id>
+			<url>http://obelix.ics.uci.edu/nexus/content/repositories/hyracks-snapshots/</url>
+		</snapshotRepository>
+	</distributionManagement>
+
+	<repositories>
+		<repository>
+			<id>hyracks-public</id>
+			<url>http://obelix.ics.uci.edu/nexus/content/groups/hyracks-public/</url>
+		</repository>
+		<repository>
+			<id>jboss-public</id>
+			<url>https://repository.jboss.org/nexus/content/groups/public/</url>
+		</repository>
+		<repository>
+			<releases>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>warn</checksumPolicy>
+			</releases>
+			<snapshots>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>fail</checksumPolicy>
+			</snapshots>
+			<id>third-party</id>
+			<url>http://obelix.ics.uci.edu/nexus/content/repositories/third-party</url>
+		</repository>
+		<repository>
+			<releases>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>warn</checksumPolicy>
+			</releases>
+			<snapshots>
+				<enabled>true</enabled>
+				<updatePolicy>always</updatePolicy>
+				<checksumPolicy>fail</checksumPolicy>
+			</snapshots>
+			<id>hyracks-public-release</id>
+			<url>http://obelix.ics.uci.edu/nexus/content/repositories/hyracks-public-releases</url>
+		</repository>
+	</repositories>
+
+	<pluginRepositories>
+		<pluginRepository>
+			<id>hyracks-public</id>
+			<url>http://obelix.ics.uci.edu/nexus/content/groups/hyracks-public/</url>
+			<releases>
+				<updatePolicy>always</updatePolicy>
+			</releases>
+		</pluginRepository>
+	</pluginRepositories>
+
+	<modules>
+		<module>hivesterix-runtime</module>
+		<module>hivesterix-translator</module>
+		<module>hivesterix-optimizer</module>
+		<module>hivesterix-serde</module>
+		<module>hivesterix-dist</module>
+		<module>hivesterix-common</module>
+	</modules>
+</project>
diff --git a/hyracks/hyracks-api/pom.xml b/hyracks/hyracks-api/pom.xml
index 72f0d8b..6807f76 100644
--- a/hyracks/hyracks-api/pom.xml
+++ b/hyracks/hyracks-api/pom.xml
@@ -15,8 +15,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/channels/IInputChannel.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/channels/IInputChannel.java
index a8f2fda..a4f0b29 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/channels/IInputChannel.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/channels/IInputChannel.java
@@ -16,7 +16,7 @@
 
 import java.nio.ByteBuffer;
 
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.context.IHyracksCommonContext;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 
 public interface IInputChannel {
@@ -30,7 +30,7 @@
 
     public void recycleBuffer(ByteBuffer buffer);
 
-    public void open(IHyracksTaskContext ctx) throws HyracksDataException;
+    public void open(IHyracksCommonContext ctx) throws HyracksDataException;
 
     public void close() throws HyracksDataException;
 }
\ No newline at end of file
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceFunctions.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceFunctions.java
index e34e60d..cd2b698 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceFunctions.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceFunctions.java
@@ -17,6 +17,8 @@
 import java.io.Serializable;
 import java.util.EnumSet;
 
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.job.JobFlag;
 import edu.uci.ics.hyracks.api.job.JobId;
 
@@ -30,6 +32,10 @@
         CREATE_JOB,
         GET_JOB_STATUS,
         START_JOB,
+        GET_DATASET_DIRECTORY_SERIVICE_INFO,
+        GET_DATASET_RESULT_STATUS,
+        GET_DATASET_RECORD_DESCRIPTOR,
+        GET_DATASET_RESULT_LOCATIONS,
         WAIT_FOR_COMPLETION,
         GET_NODE_CONTROLLERS_INFO
     }
@@ -156,6 +162,74 @@
         }
     }
 
+    public static class GetDatasetDirectoryServiceInfoFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.GET_DATASET_DIRECTORY_SERIVICE_INFO;
+        }
+    }
+
+    public static class GetDatasetResultStatusFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final JobId jobId;
+
+        private final ResultSetId rsId;
+
+        public GetDatasetResultStatusFunction(JobId jobId, ResultSetId rsId) {
+            this.jobId = jobId;
+            this.rsId = rsId;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.GET_DATASET_RESULT_STATUS;
+        }
+
+        public JobId getJobId() {
+            return jobId;
+        }
+
+        public ResultSetId getResultSetId() {
+            return rsId;
+        }
+    }
+
+    public static class GetDatasetResultLocationsFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final JobId jobId;
+
+        private final ResultSetId rsId;
+
+        private final DatasetDirectoryRecord[] knownRecords;
+
+        public GetDatasetResultLocationsFunction(JobId jobId, ResultSetId rsId, DatasetDirectoryRecord[] knownRecords) {
+            this.jobId = jobId;
+            this.rsId = rsId;
+            this.knownRecords = knownRecords;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.GET_DATASET_RESULT_LOCATIONS;
+        }
+
+        public JobId getJobId() {
+            return jobId;
+        }
+
+        public ResultSetId getResultSetId() {
+            return rsId;
+        }
+
+        public DatasetDirectoryRecord[] getKnownRecords() {
+            return knownRecords;
+        }
+    }
+
     public static class WaitForCompletionFunction extends Function {
         private static final long serialVersionUID = 1L;
 
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java
index 4c06d42..2ab42c0 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java
@@ -17,6 +17,7 @@
 import java.util.EnumSet;
 import java.util.Map;
 
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
 import edu.uci.ics.hyracks.api.job.JobFlag;
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.api.job.JobStatus;
@@ -76,6 +77,12 @@
     }
 
     @Override
+    public NetworkAddress getDatasetDirectoryServiceInfo() throws Exception {
+        HyracksClientInterfaceFunctions.GetDatasetDirectoryServiceInfoFunction gddsf = new HyracksClientInterfaceFunctions.GetDatasetDirectoryServiceInfoFunction();
+        return (NetworkAddress) rpci.call(ipcHandle, gddsf);
+    }
+
+    @Override
     public void waitForCompletion(JobId jobId) throws Exception {
         HyracksClientInterfaceFunctions.WaitForCompletionFunction wfcf = new HyracksClientInterfaceFunctions.WaitForCompletionFunction(
                 jobId);
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksConnection.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksConnection.java
index 227524c..e0fafb0 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksConnection.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksConnection.java
@@ -26,6 +26,7 @@
 import org.apache.http.impl.client.DefaultHttpClient;
 
 import edu.uci.ics.hyracks.api.client.impl.JobSpecificationActivityClusterGraphGeneratorFactory;
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
 import edu.uci.ics.hyracks.api.exceptions.HyracksException;
 import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
 import edu.uci.ics.hyracks.api.job.JobFlag;
@@ -118,6 +119,10 @@
         return hci.startJob(appName, JavaSerializationUtils.serialize(acggf), jobFlags);
     }
 
+    public NetworkAddress getDatasetDirectoryServiceInfo() throws Exception {
+        return hci.getDatasetDirectoryServiceInfo();
+    }
+
     @Override
     public void waitForCompletion(JobId jobId) throws Exception {
         hci.waitForCompletion(jobId);
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java
index bdbb544..6333c22 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java
@@ -18,6 +18,7 @@
 import java.util.EnumSet;
 import java.util.Map;
 
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
 import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
 import edu.uci.ics.hyracks.api.job.JobFlag;
 import edu.uci.ics.hyracks.api.job.JobId;
@@ -100,6 +101,14 @@
             throws Exception;
 
     /**
+     * Gets the IP address and port of the DatasetDirectoryService, wrapped in a {@link NetworkAddress}.
+     * 
+     * @return {@link NetworkAddress}
+     * @throws Exception
+     */
+    public NetworkAddress getDatasetDirectoryServiceInfo() throws Exception;
+
+    /**
      * Waits until the specified job has completed, either successfully or has
      * encountered a permanent failure.
      * 
@@ -123,4 +132,4 @@
      * @throws Exception
      */
     public ClusterTopology getClusterTopology() throws Exception;
-}
\ No newline at end of file
+}
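
To illustrate the client-side call added above, here is a minimal sketch that connects to a cluster controller and asks where the dataset directory service listens. The host and port are placeholders; only getDatasetDirectoryServiceInfo() comes from this change.

import edu.uci.ics.hyracks.api.client.HyracksConnection;
import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
import edu.uci.ics.hyracks.api.comm.NetworkAddress;

public class DatasetDirectoryLookupSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder cluster controller address; not part of this change.
        IHyracksClientConnection hcc = new HyracksConnection("localhost", 1098);
        // New in this change: discover the dataset directory service endpoint.
        NetworkAddress dds = hcc.getDatasetDirectoryServiceInfo();
        System.out.println("Dataset directory service at " + dds);
    }
}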
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java
index ef5906e..22b0a8f 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java
@@ -17,6 +17,7 @@
 import java.util.EnumSet;
 import java.util.Map;
 
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
 import edu.uci.ics.hyracks.api.job.JobFlag;
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.api.job.JobStatus;
@@ -35,6 +36,8 @@
 
     public JobId startJob(String appName, byte[] acggfBytes, EnumSet<JobFlag> jobFlags) throws Exception;
 
+    public NetworkAddress getDatasetDirectoryServiceInfo() throws Exception;
+
     public void waitForCompletion(JobId jobId) throws Exception;
 
     public Map<String, NodeControllerInfo> getNodeControllersInfo() throws Exception;
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/NodeControllerInfo.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/NodeControllerInfo.java
index fd9218a..73b5488 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/NodeControllerInfo.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/NodeControllerInfo.java
@@ -27,10 +27,14 @@
 
     private final NetworkAddress netAddress;
 
-    public NodeControllerInfo(String nodeId, NodeStatus status, NetworkAddress netAddress) {
+    private final NetworkAddress datasetNetworkAddress;
+
+    public NodeControllerInfo(String nodeId, NodeStatus status, NetworkAddress netAddress,
+            NetworkAddress datasetNetworkAddress) {
         this.nodeId = nodeId;
         this.status = status;
         this.netAddress = netAddress;
+        this.datasetNetworkAddress = datasetNetworkAddress;
     }
 
     public String getNodeId() {
@@ -44,4 +48,8 @@
     public NetworkAddress getNetworkAddress() {
         return netAddress;
     }
+
+    public NetworkAddress getDatasetNetworkAddress() {
+        return datasetNetworkAddress;
+    }
 }
\ No newline at end of file
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/JobSpecificationActivityClusterGraphGeneratorFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/JobSpecificationActivityClusterGraphGeneratorFactory.java
index f36b7b3..0eac9a2 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/JobSpecificationActivityClusterGraphGeneratorFactory.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/impl/JobSpecificationActivityClusterGraphGeneratorFactory.java
@@ -17,6 +17,7 @@
 import edu.uci.ics.hyracks.api.job.JobFlag;
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.api.rewriter.ActivityClusterGraphRewriter;
 
 public class JobSpecificationActivityClusterGraphGeneratorFactory implements IActivityClusterGraphGeneratorFactory {
     private static final long serialVersionUID = 1L;
@@ -78,6 +79,8 @@
         return new IActivityClusterGraphGenerator() {
             @Override
             public ActivityClusterGraph initialize() {
+                ActivityClusterGraphRewriter rewriter = new ActivityClusterGraphRewriter();
+                rewriter.rewrite(acg);
                 return acg;
             }
 
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksTaskContext.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksTaskContext.java
index e964d66..a2ee977 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksTaskContext.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksTaskContext.java
@@ -15,6 +15,7 @@
 package edu.uci.ics.hyracks.api.context;
 
 import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
+import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
 import edu.uci.ics.hyracks.api.io.IWorkspaceFileFactory;
 import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
 import edu.uci.ics.hyracks.api.job.profiling.counters.ICounterContext;
@@ -28,5 +29,7 @@
 
     public ICounterContext getCounterContext();
 
+    public IDatasetPartitionManager getDatasetPartitionManager();
+
     public void sendApplicationMessageToCC(byte[] message, String nodeId) throws Exception;
 }
\ No newline at end of file
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IResultSerializer.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IResultSerializer.java
new file mode 100644
index 0000000..ba2ff9a
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IResultSerializer.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.dataflow.value;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public interface IResultSerializer extends Serializable {
+    /**
+     * Initializes the serializer.
+     */
+    public void init() throws HyracksDataException;
+
+    /**
+     * Serializes the tuple at the given index and appends it to the provided output stream.
+     * 
+     * @param tAccess
+     *            - A frame tuple accessor object that contains the original data to be serialized
+     * @param tIdx
+     *            - Index of the tuple that should be serialized.
+     * @return true if the tuple was appended successfully, else false.
+     */
+    public boolean appendTuple(IFrameTupleAccessor tAccess, int tIdx) throws HyracksDataException;
+}
\ No newline at end of file
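
A minimal sketch of an IResultSerializer implementation, assuming results should simply be dumped as hex; the class name and the heap-backed frame buffer are assumptions, while init() and appendTuple(...) follow the interface above.

import java.io.PrintStream;

import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
import edu.uci.ics.hyracks.api.dataflow.value.IResultSerializer;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;

public class HexDumpResultSerializer implements IResultSerializer {
    private static final long serialVersionUID = 1L;

    private final PrintStream printStream;

    public HexDumpResultSerializer(PrintStream printStream) {
        this.printStream = printStream;
    }

    @Override
    public void init() throws HyracksDataException {
        // No state to set up in this sketch.
    }

    @Override
    public boolean appendTuple(IFrameTupleAccessor tAccess, int tIdx) throws HyracksDataException {
        // Walk the tuple's byte range inside the frame and print it as hex.
        // Assumes a heap-backed frame buffer; array() would fail on a direct buffer.
        byte[] frame = tAccess.getBuffer().array();
        int start = tAccess.getTupleStartOffset(tIdx);
        int end = tAccess.getTupleEndOffset(tIdx);
        for (int i = start; i < end; i++) {
            printStream.printf("%02x", frame[i]);
        }
        printStream.println();
        return true;
    }
}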
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IResultSerializerFactory.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IResultSerializerFactory.java
new file mode 100644
index 0000000..1fbf00f
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/IResultSerializerFactory.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.dataflow.value;
+
+import java.io.PrintStream;
+import java.io.Serializable;
+
+public interface IResultSerializerFactory extends Serializable {
+    /**
+     * Creates a result serializer.
+     * 
+     * @param recordDesc
+     *            - A record descriptor describing the tuples to be serialized.
+     * @param printStream
+     *            - A print stream object to which the serialized results will be written.
+     * @return A new instance of a result serializer.
+     */
+    public IResultSerializer createResultSerializer(RecordDescriptor recordDesc, PrintStream printStream);
+}
\ No newline at end of file
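
A matching factory sketch for the interface above; it passes the print stream through to the hypothetical HexDumpResultSerializer from the previous example and ignores the record descriptor only because that sketch does not interpret individual fields.

import java.io.PrintStream;

import edu.uci.ics.hyracks.api.dataflow.value.IResultSerializer;
import edu.uci.ics.hyracks.api.dataflow.value.IResultSerializerFactory;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;

public class HexDumpResultSerializerFactory implements IResultSerializerFactory {
    private static final long serialVersionUID = 1L;

    @Override
    public IResultSerializer createResultSerializer(RecordDescriptor recordDesc, PrintStream printStream) {
        // The hex-dump sketch works on raw bytes, so recordDesc is unused here.
        return new HexDumpResultSerializer(printStream);
    }
}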
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/JSONSerializable.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/JSONSerializable.java
new file mode 100644
index 0000000..1eca502
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/JSONSerializable.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.dataflow.value;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+
+public interface JSONSerializable {
+    /**
+     * Returns the JSON representation of the object.
+     * 
+     * @return A new JSONObject instance representing this Java object.
+     */
+    public JSONObject toJSON() throws JSONException;
+}
\ No newline at end of file
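
For illustration, a small type implementing JSONSerializable; the Coordinate class is invented for this example.

import org.json.JSONException;
import org.json.JSONObject;

import edu.uci.ics.hyracks.api.dataflow.value.JSONSerializable;

public class Coordinate implements JSONSerializable {
    private final double x;
    private final double y;

    public Coordinate(double x, double y) {
        this.x = x;
        this.y = y;
    }

    @Override
    public JSONObject toJSON() throws JSONException {
        // Build a fresh JSONObject describing this instance.
        JSONObject json = new JSONObject();
        json.put("x", x);
        json.put("y", y);
        return json;
    }
}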
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/DatasetDirectoryRecord.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/DatasetDirectoryRecord.java
new file mode 100644
index 0000000..6316bba
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/DatasetDirectoryRecord.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.dataset;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+
+public class DatasetDirectoryRecord implements Serializable {
+    public enum Status {
+        IDLE,
+        RUNNING,
+        SUCCESS,
+        FAILED
+    }
+
+    private static final long serialVersionUID = 1L;
+
+    private NetworkAddress address;
+
+    private boolean readEOS;
+
+    private Status status;
+
+    public DatasetDirectoryRecord() {
+        this.address = null;
+        this.readEOS = false;
+        this.status = Status.IDLE;
+    }
+
+    public void setNetworkAddress(NetworkAddress address) {
+        this.address = address;
+    }
+
+    public NetworkAddress getNetworkAddress() {
+        return address;
+    }
+
+    public void readEOS() {
+        this.readEOS = true;
+    }
+
+    public boolean hasReachedReadEOS() {
+        return readEOS;
+    }
+
+    public void start() {
+        status = Status.RUNNING;
+    }
+
+    public void writeEOS() {
+        status = Status.SUCCESS;
+    }
+
+    public void fail() {
+        status = Status.FAILED;
+    }
+
+    public Status getStatus() {
+        return status;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == this) {
+            return true;
+        }
+        if (!(o instanceof DatasetDirectoryRecord)) {
+            return false;
+        }
+        return address.equals(((DatasetDirectoryRecord) o).address);
+    }
+}
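
To make the record's state machine concrete, a short walk-through of the transitions defined above (IDLE to RUNNING to SUCCESS, with read progress tracked separately). The address bytes and port are placeholders, and the NetworkAddress(byte[], int) constructor shape is assumed from this codebase.

import edu.uci.ics.hyracks.api.comm.NetworkAddress;
import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord;

public class DatasetDirectoryRecordLifecycle {
    public static void main(String[] args) {
        DatasetDirectoryRecord record = new DatasetDirectoryRecord(); // Status.IDLE
        // Placeholder address; constructor shape is an assumption.
        record.setNetworkAddress(new NetworkAddress(new byte[] { 127, 0, 0, 1 }, 33000));
        record.start();    // Status.RUNNING: the writer has started producing
        record.writeEOS(); // Status.SUCCESS: the writer finished the partition
        record.readEOS();  // the reader consumed everything
        System.out.println(record.getStatus() + ", fully read: " + record.hasReachedReadEOS());
    }
}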
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetDirectoryService.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetDirectoryService.java
new file mode 100644
index 0000000..5266333
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetDirectoryService.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.dataset;
+
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobId;
+
+public interface IDatasetDirectoryService {
+    public void registerResultPartitionLocation(JobId jobId, ResultSetId rsId, boolean orderedResult, int partition,
+            int nPartitions, NetworkAddress networkAddress);
+
+    public void reportResultPartitionWriteCompletion(JobId jobId, ResultSetId rsId, int partition);
+
+    public void reportResultPartitionFailure(JobId jobId, ResultSetId rsId, int partition);
+
+    public Status getResultStatus(JobId jobId, ResultSetId rsId) throws HyracksDataException;
+
+    public DatasetDirectoryRecord[] getResultPartitionLocations(JobId jobId, ResultSetId rsId,
+            DatasetDirectoryRecord[] knownLocations) throws HyracksDataException;
+}
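
A sketch of the call order the directory service interface implies for a single-partition result; all arguments are placeholders, and only the three interface calls are taken from the definition above.

import edu.uci.ics.hyracks.api.comm.NetworkAddress;
import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord;
import edu.uci.ics.hyracks.api.dataset.IDatasetDirectoryService;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.job.JobId;

public class DirectoryServiceFlowSketch {
    public static void reportSinglePartition(IDatasetDirectoryService dds, JobId jobId, ResultSetId rsId,
            NetworkAddress resultAddress) throws HyracksDataException {
        // 1. The writer announces where partition 0 of 1 will be served from.
        dds.registerResultPartitionLocation(jobId, rsId, false, 0, 1, resultAddress);
        // 2. ... the task materializes the partition ...
        dds.reportResultPartitionWriteCompletion(jobId, rsId, 0);
        // 3. Clients polling the directory now observe SUCCESS.
        DatasetDirectoryRecord.Status status = dds.getResultStatus(jobId, rsId);
        System.out.println("Result status: " + status);
    }
}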
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetInputChannelMonitor.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetInputChannelMonitor.java
new file mode 100644
index 0000000..65ba1c7
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetInputChannelMonitor.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.dataset;
+
+import edu.uci.ics.hyracks.api.channels.IInputChannelMonitor;
+
+public interface IDatasetInputChannelMonitor extends IInputChannelMonitor {
+    public boolean eosReached();
+
+    public boolean failed();
+
+    public int getNFramesAvailable();
+
+    public void notifyFrameRead();
+}
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionManager.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionManager.java
new file mode 100644
index 0000000..ae38c7f
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionManager.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.dataset;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.io.IWorkspaceFileFactory;
+import edu.uci.ics.hyracks.api.job.JobId;
+
+public interface IDatasetPartitionManager {
+    public IFrameWriter createDatasetPartitionWriter(IHyracksTaskContext ctx, ResultSetId rsId, boolean orderedResult,
+            int partition, int nPartitions) throws HyracksException;
+
+    public void reportPartitionWriteCompletion(JobId jobId, ResultSetId resultSetId, int partition)
+            throws HyracksException;
+
+    public void reportPartitionFailure(JobId jobId, ResultSetId resultSetId, int partition) throws HyracksException;
+
+    public void initializeDatasetPartitionReader(JobId jobId, int partition, IFrameWriter noc) throws HyracksException;
+
+    public IWorkspaceFileFactory getFileFactory();
+
+    public void close();
+}
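
A sketch of how a task might obtain a result writer through the partition manager that IHyracksTaskContext now exposes; everything except the two API calls is illustrative.

import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;

public class ResultWriterSketch {
    public static IFrameWriter openResultWriter(IHyracksTaskContext ctx, ResultSetId rsId, int partition,
            int nPartitions) throws HyracksException {
        IDatasetPartitionManager dpm = ctx.getDatasetPartitionManager();
        // orderedResult = false: the client does not require partitions in order.
        return dpm.createDatasetPartitionWriter(ctx, rsId, false, partition, nPartitions);
    }
}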
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionReader.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionReader.java
new file mode 100644
index 0000000..8f5ed64
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionReader.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.dataset;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+
+public interface IDatasetPartitionReader {
+    public void writeTo(IFrameWriter writer);
+}
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionWriter.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionWriter.java
new file mode 100644
index 0000000..42dc157
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionWriter.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.dataset;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public interface IDatasetPartitionWriter extends IFrameWriter {
+    public Page returnPage() throws HyracksDataException;
+}
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDataset.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDataset.java
new file mode 100644
index 0000000..4a7a6b0
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDataset.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.dataset;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobId;
+
+public interface IHyracksDataset {
+    public IHyracksDatasetReader createReader(JobId jobId, ResultSetId resultSetId) throws HyracksDataException;
+}
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetDirectoryServiceConnection.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetDirectoryServiceConnection.java
new file mode 100644
index 0000000..d49d5cd
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetDirectoryServiceConnection.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.dataset;
+
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.job.JobId;
+
+public interface IHyracksDatasetDirectoryServiceConnection {
+    /**
+     * Gets the result status for the given result set.
+     * 
+     * @param jobId
+     *            ID of the job
+     * @param rsId
+     *            ID of the result set
+     * @return {@link Status}
+     * @throws Exception
+     */
+    public Status getDatasetResultStatus(JobId jobId, ResultSetId rsId) throws Exception;
+
+    /**
+     * Gets the IP addresses and ports of the partitions generating the result, one record per location.
+     * 
+     * @param jobId
+     *            ID of the job
+     * @param rsId
+     *            ID of the result set
+     * @param knownRecords
+     *            Locations that are already known to the client
+     * @return An array of {@link DatasetDirectoryRecord}.
+     * @throws Exception
+     */
+    public DatasetDirectoryRecord[] getDatasetResultLocations(JobId jobId, ResultSetId rsId,
+            DatasetDirectoryRecord[] knownRecords) throws Exception;
+}
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetDirectoryServiceInterface.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetDirectoryServiceInterface.java
new file mode 100644
index 0000000..ba21a84
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetDirectoryServiceInterface.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.dataset;
+
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.job.JobId;
+
+public interface IHyracksDatasetDirectoryServiceInterface {
+    /**
+     * Gets the result status for the given result set.
+     * 
+     * @param jobId
+     *            ID of the job
+     * @param rsId
+     *            ID of the result set
+     * @return {@link Status}
+     * @throws Exception
+     */
+    public Status getDatasetResultStatus(JobId jobId, ResultSetId rsId) throws Exception;
+
+    /**
+     * Gets the IP addresses and ports of the partitions generating the result, one record per location.
+     * 
+     * @param jobId
+     *            ID of the job
+     * @param rsId
+     *            ID of the result set
+     * @param knownRecords
+     *            Locations from the dataset directory that are already known to the client
+     * @return An array of {@link DatasetDirectoryRecord}.
+     * @throws Exception
+     */
+    public DatasetDirectoryRecord[] getDatasetResultLocations(JobId jobId, ResultSetId rsId,
+            DatasetDirectoryRecord[] knownRecords) throws Exception;
+}
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetReader.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetReader.java
new file mode 100644
index 0000000..b928a49
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetReader.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.dataset;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public interface IHyracksDatasetReader {
+    public Status getResultStatus();
+
+    public int read(ByteBuffer buffer) throws HyracksDataException;
+}
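
A sketch of the polling loop the reader interface suggests: read frames until a non-positive count, which this sketch treats as end of result. The 32KB frame size and the processFrame hook are assumptions.

import java.nio.ByteBuffer;

import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetReader;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;

public class ResultReadLoopSketch {
    public static void drain(IHyracksDatasetReader reader) throws HyracksDataException {
        ByteBuffer frame = ByteBuffer.allocate(32768); // assumed frame size
        int bytesRead;
        while ((bytesRead = reader.read(frame)) > 0) {
            // Hand the filled frame to application code (hypothetical hook).
            processFrame(frame, bytesRead);
            frame.clear();
        }
    }

    private static void processFrame(ByteBuffer frame, int length) {
        // Placeholder: a real consumer would use a frame tuple accessor here.
    }
}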
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/Page.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/Page.java
new file mode 100644
index 0000000..7275dfd
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/Page.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.dataset;
+
+import java.nio.ByteBuffer;
+
+public class Page {
+    private final ByteBuffer buffer;
+
+    public Page(ByteBuffer buffer) {
+        this.buffer = buffer;
+    }
+
+    public ByteBuffer getBuffer() {
+        return buffer;
+    }
+
+    public ByteBuffer clear() {
+        return (ByteBuffer) buffer.clear();
+    }
+}
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/ResultSetId.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/ResultSetId.java
new file mode 100644
index 0000000..ae38ef3
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/ResultSetId.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.dataset;
+
+import java.io.Serializable;
+
+public class ResultSetId implements Serializable {
+    private static final long serialVersionUID = 1L;
+
+    private final long id;
+
+    public ResultSetId(long id) {
+        this.id = id;
+    }
+
+    public long getId() {
+        return id;
+    }
+
+    @Override
+    public int hashCode() {
+        return (int) id;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == this) {
+            return true;
+        }
+        if (!(o instanceof ResultSetId)) {
+            return false;
+        }
+        return ((ResultSetId) o).id == id;
+    }
+
+    @Override
+    public String toString() {
+        return "RSID:" + id;
+    }
+}
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityCluster.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityCluster.java
index 6698ff7..9fb2b08 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityCluster.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/ActivityCluster.java
@@ -33,7 +33,7 @@
 import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicyAssignmentPolicy;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
 
-public final class ActivityCluster implements Serializable {
+public class ActivityCluster implements Serializable {
     private static final long serialVersionUID = 1L;
 
     private final ActivityClusterGraph acg;
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSpecification.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSpecification.java
index 7c523f1..1fdff0f 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSpecification.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSpecification.java
@@ -34,12 +34,15 @@
 import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
 import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicyAssignmentPolicy;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 
 public class JobSpecification implements Serializable, IOperatorDescriptorRegistry, IConnectorDescriptorRegistry {
     private static final long serialVersionUID = 1L;
 
     private final List<OperatorDescriptorId> roots;
 
+    private final List<ResultSetId> resultSetIds;
+
     private final Map<OperatorDescriptorId, IOperatorDescriptor> opMap;
 
     private final Map<ConnectorDescriptorId, IConnectorDescriptor> connMap;
@@ -72,6 +75,7 @@
 
     public JobSpecification() {
         roots = new ArrayList<OperatorDescriptorId>();
+        resultSetIds = new ArrayList<ResultSetId>();
         opMap = new HashMap<OperatorDescriptorId, IOperatorDescriptor>();
         connMap = new HashMap<ConnectorDescriptorId, IConnectorDescriptor>();
         opInputMap = new HashMap<OperatorDescriptorId, List<IConnectorDescriptor>>();
@@ -104,6 +108,10 @@
         roots.add(op.getOperatorId());
     }
 
+    public void addResultSetId(ResultSetId rsId) {
+        resultSetIds.add(rsId);
+    }
+
     public void connect(IConnectorDescriptor conn, IOperatorDescriptor producerOp, int producerPort,
             IOperatorDescriptor consumerOp, int consumerPort) {
         insertIntoIndexedMap(opInputMap, consumerOp.getOperatorId(), consumerPort, conn);
@@ -208,6 +216,10 @@
         return roots;
     }
 
+    public List<ResultSetId> getResultSetIds() {
+        return resultSetIds;
+    }
+
     public IConnectorPolicyAssignmentPolicy getConnectorPolicyAssignmentPolicy() {
         return connectorPolicyAssignmentPolicy;
     }
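
Tying the JobSpecification changes together, a sketch of registering a result set id while building a job; the operator wiring that would normally end in a result writer is omitted.

import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.job.JobSpecification;

public class ResultSetRegistrationSketch {
    public static JobSpecification buildSpec() {
        JobSpecification spec = new JobSpecification();
        // One logical result stream for this job; the id value is arbitrary.
        ResultSetId rsId = new ResultSetId(1);
        spec.addResultSetId(rsId);
        // ... operators and connectors would be added here ...
        return spec;
    }
}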
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/partitions/ResultSetPartitionId.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/partitions/ResultSetPartitionId.java
new file mode 100644
index 0000000..148a8a2
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/partitions/ResultSetPartitionId.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.partitions;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.job.JobId;
+
+public final class ResultSetPartitionId implements Serializable {
+    private static final long serialVersionUID = 1L;
+
+    private final JobId jobId;
+
+    private final ResultSetId resultSetId;
+    
+    private final int partition;
+
+    public ResultSetPartitionId(JobId jobId, ResultSetId resultSetId, int partition) {
+        this.jobId = jobId;
+        this.resultSetId = resultSetId;
+        this.partition = partition;
+    }
+
+    public JobId getJobId() {
+        return jobId;
+    }
+
+    public ResultSetId getResultSetId() {
+        return resultSetId;
+    }
+
+    public int getPartition() {
+        return partition;
+    }
+
+    @Override
+    public int hashCode() {
+        final int prime = 31;
+        int result = 1;
+        result = prime * result + ((resultSetId == null) ? 0 : resultSetId.hashCode());
+        result = prime * result + ((jobId == null) ? 0 : jobId.hashCode());
+        result = prime * result + partition;
+        return result;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj)
+            return true;
+        if (obj == null)
+            return false;
+        if (getClass() != obj.getClass())
+            return false;
+        ResultSetPartitionId other = (ResultSetPartitionId) obj;
+        if (resultSetId == null) {
+            if (other.resultSetId != null)
+                return false;
+        } else if (!resultSetId.equals(other.resultSetId))
+            return false;
+        if (jobId == null) {
+            if (other.jobId != null)
+                return false;
+        } else if (!jobId.equals(other.jobId))
+            return false;
+        if (partition != other.partition)
+            return false;
+        return true;
+    }
+
+    @Override
+    public String toString() {
+        return jobId.toString() + ":" + resultSetId + ":" + partition;
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/rewriter/ActivityClusterGraphRewriter.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
new file mode 100644
index 0000000..c6761e9
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
@@ -0,0 +1,381 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.api.rewriter;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Queue;
+import java.util.Set;
+
+import org.apache.commons.lang3.tuple.Pair;
+
+import edu.uci.ics.hyracks.api.dataflow.ActivityId;
+import edu.uci.ics.hyracks.api.dataflow.ConnectorDescriptorId;
+import edu.uci.ics.hyracks.api.dataflow.IActivity;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.ActivityCluster;
+import edu.uci.ics.hyracks.api.job.ActivityClusterGraph;
+import edu.uci.ics.hyracks.api.job.ActivityClusterId;
+import edu.uci.ics.hyracks.api.rewriter.runtime.SuperActivity;
+
+/**
+ * This class rewrites the ActivityClusterGraph to eliminate
+ * all one-to-one connections and merge one-to-one connected
+ * DAGs into super activities.
+ * <p>
+ * Each super activity internally maintains a DAG and executes it at runtime.
+ * 
+ * @author yingyib
+ */
+public class ActivityClusterGraphRewriter {
+    private static final String ONE_TO_ONE_CONNECTOR = "edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor";
+
+    /**
+     * Rewrites an activity cluster graph to eliminate
+     * all one-to-one connections and merge one-to-one connected
+     * DAGs into super activities.
+     * 
+     * @param acg
+     *            the activity cluster graph
+     */
+    public void rewrite(ActivityClusterGraph acg) {
+        acg.getActivityMap().clear();
+        acg.getConnectorMap().clear();
+        Map<IActivity, SuperActivity> invertedActivitySuperActivityMap = new HashMap<IActivity, SuperActivity>();
+        for (Entry<ActivityClusterId, ActivityCluster> entry : acg.getActivityClusterMap().entrySet()) {
+            rewriteIntraActivityCluster(entry.getValue(), invertedActivitySuperActivityMap);
+        }
+        for (Entry<ActivityClusterId, ActivityCluster> entry : acg.getActivityClusterMap().entrySet()) {
+            rewriteInterActivityCluster(entry.getValue(), invertedActivitySuperActivityMap);
+        }
+        invertedActivitySuperActivityMap.clear();
+    }
+
+    /**
+     * Rewrites the blocking relationships among activity clusters.
+     * 
+     * @param ac
+     *            the activity cluster to be rewritten
+     */
+    private void rewriteInterActivityCluster(ActivityCluster ac,
+            Map<IActivity, SuperActivity> invertedActivitySuperActivityMap) {
+        Map<ActivityId, Set<ActivityId>> blocked2BlockerMap = ac.getBlocked2BlockerMap();
+        Map<ActivityId, ActivityId> invertedAid2SuperAidMap = new HashMap<ActivityId, ActivityId>();
+        for (Entry<IActivity, SuperActivity> entry : invertedActivitySuperActivityMap.entrySet()) {
+            invertedAid2SuperAidMap.put(entry.getKey().getActivityId(), entry.getValue().getActivityId());
+        }
+        Map<ActivityId, Set<ActivityId>> replacedBlocked2BlockerMap = new HashMap<ActivityId, Set<ActivityId>>();
+        for (Entry<ActivityId, Set<ActivityId>> entry : blocked2BlockerMap.entrySet()) {
+            ActivityId blocked = entry.getKey();
+            ActivityId replacedBlocked = invertedAid2SuperAidMap.get(blocked);
+            Set<ActivityId> blockers = entry.getValue();
+            Set<ActivityId> replacedBlockers = null;
+            if (blockers != null) {
+                replacedBlockers = new HashSet<ActivityId>();
+                for (ActivityId blocker : blockers) {
+                    replacedBlockers.add(invertedAid2SuperAidMap.get(blocker));
+                    ActivityCluster dependingAc = ac.getActivityClusterGraph().getActivityMap()
+                            .get(invertedAid2SuperAidMap.get(blocker));
+                    if (!ac.getDependencies().contains(dependingAc)) {
+                        ac.getDependencies().add(dependingAc);
+                    }
+                }
+            }
+            if (replacedBlockers != null) {
+                Set<ActivityId> existingBlockers = replacedBlocked2BlockerMap.get(replacedBlocked);
+                if (existingBlockers == null) {
+                    replacedBlocked2BlockerMap.put(replacedBlocked, replacedBlockers);
+                } else {
+                    existingBlockers.addAll(replacedBlockers);
+                    replacedBlocked2BlockerMap.put(replacedBlocked, existingBlockers);
+                }
+            }
+        }
+        blocked2BlockerMap.clear();
+        blocked2BlockerMap.putAll(replacedBlocked2BlockerMap);
+    }
+
+    /**
+     * Rewrites an activity cluster internally.
+     * 
+     * @param ac
+     *            the activity cluster to be rewritten
+     */
+    private void rewriteIntraActivityCluster(ActivityCluster ac,
+            Map<IActivity, SuperActivity> invertedActivitySuperActivityMap) {
+        Map<ActivityId, IActivity> activities = ac.getActivityMap();
+        Map<ActivityId, List<IConnectorDescriptor>> activityInputMap = ac.getActivityInputMap();
+        Map<ActivityId, List<IConnectorDescriptor>> activityOutputMap = ac.getActivityOutputMap();
+        Map<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> connectorActivityMap = ac
+                .getConnectorActivityMap();
+        ActivityClusterGraph acg = ac.getActivityClusterGraph();
+        Map<ActivityId, IActivity> startActivities = new HashMap<ActivityId, IActivity>();
+        Map<ActivityId, SuperActivity> superActivities = new HashMap<ActivityId, SuperActivity>();
+        Map<ActivityId, Queue<IActivity>> toBeExpendedMap = new HashMap<ActivityId, Queue<IActivity>>();
+
+        /**
+         * Build the initial super activities
+         */
+        for (Entry<ActivityId, IActivity> entry : activities.entrySet()) {
+            ActivityId activityId = entry.getKey();
+            IActivity activity = entry.getValue();
+            if (activityInputMap.get(activityId) == null) {
+                startActivities.put(activityId, activity);
+                /**
+                 * use the start activity's id as the id of the super activity
+                 */
+                createNewSuperActivity(ac, superActivities, toBeExpendedMap, invertedActivitySuperActivityMap,
+                        activityId, activity);
+            }
+        }
+
+        /**
+         * Expand the one-to-one connected activity clusters in BFS order.
+         * After the while-loop, the original activities are partitioned
+         * into equivalence classes, one per super activity.
+         */
+        Map<ActivityId, SuperActivity> clonedSuperActivities = new HashMap<ActivityId, SuperActivity>();
+        while (toBeExpendedMap.size() > 0) {
+            clonedSuperActivities.clear();
+            clonedSuperActivities.putAll(superActivities);
+            for (Entry<ActivityId, SuperActivity> entry : clonedSuperActivities.entrySet()) {
+                ActivityId superActivityId = entry.getKey();
+                SuperActivity superActivity = entry.getValue();
+
+                /**
+                 * for the case where the super activity has already been swallowed
+                 */
+                if (superActivities.get(superActivityId) == null) {
+                    continue;
+                }
+
+                /**
+                 * expand the super activity
+                 */
+                Queue<IActivity> toBeExpended = toBeExpendedMap.get(superActivityId);
+                if (toBeExpended == null) {
+                    /**
+                     * Nothing to expand
+                     */
+                    continue;
+                }
+                IActivity expendingActivity = toBeExpended.poll();
+                List<IConnectorDescriptor> outputConnectors = activityOutputMap.get(expendingActivity.getActivityId());
+                if (outputConnectors != null) {
+                    for (IConnectorDescriptor outputConn : outputConnectors) {
+                        Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> endPoints = connectorActivityMap
+                                .get(outputConn.getConnectorId());
+                        IActivity newActivity = endPoints.getRight().getLeft();
+                        SuperActivity existingSuperActivity = invertedActivitySuperActivityMap.get(newActivity);
+                        if (outputConn.getClass().getName().contains(ONE_TO_ONE_CONNECTOR)) {
+                            /**
+                             * expand the super activity cluster along a one-to-one outbound connection
+                             */
+                            if (existingSuperActivity == null) {
+                                superActivity.addActivity(newActivity);
+                                toBeExpended.add(newActivity);
+                                invertedActivitySuperActivityMap.put(newActivity, superActivity);
+                            } else {
+                                /**
+                                 * the two activities are already in the same super activity
+                                 */
+                                if (existingSuperActivity == superActivity) {
+                                    continue;
+                                }
+                                /**
+                                 * swallow an existing super activity
+                                 */
+                                swallowExistingSuperActivity(superActivities, toBeExpendedMap,
+                                        invertedActivitySuperActivityMap, superActivity, superActivityId,
+                                        existingSuperActivity);
+                            }
+                        } else {
+                            if (existingSuperActivity == null) {
+                                /**
+                                 * create a new super activity
+                                 */
+                                createNewSuperActivity(ac, superActivities, toBeExpendedMap,
+                                        invertedActivitySuperActivityMap, newActivity.getActivityId(), newActivity);
+                            }
+                        }
+                    }
+                }
+
+                /**
+                 * remove the to-be-expanded queue if it is empty
+                 */
+                if (toBeExpanded.size() == 0) {
+                    toBeExpendedMap.remove(superActivityId);
+                }
+            }
+        }
+
+        Map<ConnectorDescriptorId, IConnectorDescriptor> connMap = ac.getConnectorMap();
+        Map<ConnectorDescriptorId, RecordDescriptor> connRecordDesc = ac.getConnectorRecordDescriptorMap();
+        Map<SuperActivity, Integer> superActivityProducerPort = new HashMap<SuperActivity, Integer>();
+        Map<SuperActivity, Integer> superActivityConsumerPort = new HashMap<SuperActivity, Integer>();
+        for (Entry<ActivityId, SuperActivity> entry : superActivities.entrySet()) {
+            superActivityProducerPort.put(entry.getValue(), 0);
+            superActivityConsumerPort.put(entry.getValue(), 0);
+        }
+
+        /**
+         * create a new activity cluster to replace the old activity cluster
+         */
+        ActivityCluster newActivityCluster = new ActivityCluster(acg, ac.getId());
+        newActivityCluster.setConnectorPolicyAssignmentPolicy(ac.getConnectorPolicyAssignmentPolicy());
+        for (Entry<ActivityId, SuperActivity> entry : superActivities.entrySet()) {
+            newActivityCluster.addActivity(entry.getValue());
+            acg.getActivityMap().put(entry.getKey(), newActivityCluster);
+        }
+
+        /**
+         * Set up connectors: either inside a super activity or among super activities
+         */
+        for (Entry<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> entry : connectorActivityMap
+                .entrySet()) {
+            ConnectorDescriptorId connectorId = entry.getKey();
+            Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> endPoints = entry.getValue();
+            IActivity producerActivity = endPoints.getLeft().getLeft();
+            IActivity consumerActivity = endPoints.getRight().getLeft();
+            int producerPort = endPoints.getLeft().getRight();
+            int consumerPort = endPoints.getRight().getRight();
+            RecordDescriptor recordDescriptor = connRecordDesc.get(connectorId);
+            IConnectorDescriptor conn = connMap.get(connectorId);
+            if (conn.getClass().getName().contains(ONE_TO_ONE_CONNECTOR)) {
+                /**
+                 * connection edge between inner activities
+                 */
+                SuperActivity residingSuperActivity = invertedActivitySuperActivityMap.get(producerActivity);
+                residingSuperActivity.connect(conn, producerActivity, producerPort, consumerActivity, consumerPort,
+                        recordDescriptor);
+            } else {
+                /**
+                 * connection edge between super activities
+                 */
+                SuperActivity producerSuperActivity = invertedActivitySuperActivityMap.get(producerActivity);
+                SuperActivity consumerSuperActivity = invertedActivitySuperActivityMap.get(consumerActivity);
+                int producerSAPort = superActivityProducerPort.get(producerSuperActivity);
+                int consumerSAPort = superActivityConsumerPort.get(consumerSuperActivity);
+                newActivityCluster.addConnector(conn);
+                newActivityCluster.connect(conn, producerSuperActivity, producerSAPort, consumerSuperActivity,
+                        consumerSAPort, recordDescriptor);
+
+                /**
+                 * bridge the port
+                 */
+                producerSuperActivity.setClusterOutputIndex(producerSAPort, producerActivity.getActivityId(),
+                        producerPort);
+                consumerSuperActivity.setClusterInputIndex(consumerSAPort, consumerActivity.getActivityId(),
+                        consumerPort);
+                acg.getConnectorMap().put(connectorId, newActivityCluster);
+
+                /**
+                 * increase the port numbers for the producer and the consumer
+                 */
+                superActivityProducerPort.put(producerSuperActivity, ++producerSAPort);
+                superActivityConsumerPort.put(consumerSuperActivity, ++consumerSAPort);
+            }
+        }
+
+        /**
+         * Set up the roots of the new activity cluster
+         */
+        for (Entry<ActivityId, SuperActivity> entry : superActivities.entrySet()) {
+            List<IConnectorDescriptor> connIds = newActivityCluster.getActivityOutputMap().get(entry.getKey());
+            if (connIds == null || connIds.size() == 0) {
+                newActivityCluster.addRoot(entry.getValue());
+            }
+        }
+
+        /**
+         * set up the blocked2Blocker mapping, which will be updated in the rewriteInterActivityCluster call
+         */
+        newActivityCluster.getBlocked2BlockerMap().putAll(ac.getBlocked2BlockerMap());
+
+        /**
+         * replace the old activity cluster with the new activity cluster
+         */
+        acg.getActivityClusterMap().put(ac.getId(), newActivityCluster);
+    }
+
+    /**
+     * Create a new super activity
+     * 
+     * @param acg
+     *            the activity cluster
+     * @param superActivities
+     *            the map from activity id to current super activities
+     * @param toBeExpendedMap
+     *            the map from an existing super activity to its BFS expansion queue of the original activities
+     * @param invertedActivitySuperActivityMap
+     *            the map from the original activities to their hosted super activities
+     * @param activityId
+     *            the activity id for the new super activity, which is the first added activity's id in the super activity
+     * @param activity
+     *            the first activity added to the new super activity
+     */
+    private void createNewSuperActivity(ActivityCluster acg, Map<ActivityId, SuperActivity> superActivities,
+            Map<ActivityId, Queue<IActivity>> toBeExpendedMap,
+            Map<IActivity, SuperActivity> invertedActivitySuperActivityMap, ActivityId activityId, IActivity activity) {
+        SuperActivity superActivity = new SuperActivity(acg.getActivityClusterGraph(), acg.getId(), activityId);
+        superActivities.put(activityId, superActivity);
+        superActivity.addActivity(activity);
+        Queue<IActivity> toBeExpanded = new LinkedList<IActivity>();
+        toBeExpanded.add(activity);
+        toBeExpendedMap.put(activityId, toBeExpanded);
+        invertedActivitySuperActivityMap.put(activity, superActivity);
+    }
+
+    /**
+     * One super activity swallows another existing super activity.
+     * 
+     * @param superActivities
+     *            the map from activity id to current super activities
+     * @param toBeExpendedMap
+     *            the map from an existing super activity to its BFS expansion queue of the original activities
+     * @param invertedActivitySuperActivityMap
+     *            the map from the original activities to their hosted super activities
+     * @param superActivity
+     *            the "swallowing" super activity
+     * @param superActivityId
+     *            the activity id for the "swallowing" super activity, which is also the first added acitivty's id in the super activity
+     * @param existingSuperActivity
+     *            an existing super activity which is to be swallowed by the "swallowing" super activity
+     */
+    private void swallowExistingSuperActivity(Map<ActivityId, SuperActivity> superActivities,
+            Map<ActivityId, Queue<IActivity>> toBeExpendedMap,
+            Map<IActivity, SuperActivity> invertedActivitySuperActivityMap, SuperActivity superActivity,
+            ActivityId superActivityId, SuperActivity existingSuperActivity) {
+        ActivityId existingSuperActivityId = existingSuperActivity.getActivityId();
+        superActivities.remove(existingSuperActivityId);
+        for (Entry<ActivityId, IActivity> existingEntry : existingSuperActivity.getActivityMap().entrySet()) {
+            IActivity existingActivity = existingEntry.getValue();
+            superActivity.addActivity(existingActivity);
+            invertedActivitySuperActivityMap.put(existingActivity, superActivity);
+        }
+        Queue<IActivity> tbeQueue = toBeExpendedMap.get(superActivityId);
+        Queue<IActivity> existingTbeQueue = toBeExpendedMap.remove(existingSuperActivityId);
+        if (existingTbeQueue != null) {
+            tbeQueue.addAll(existingTbeQueue);
+        }
+    }
+}
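
The clustering pass above is easiest to see on a toy graph. Below is a minimal, self-contained sketch (hypothetical names and plain JDK collections, not the Hyracks API) of the same idea: BFS along one-to-one edges grows a group, and reaching an already-grouped activity swallows that group, mirroring swallowExistingSuperActivity.

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;

public class OneToOneGroupingSketch {
    /** Partition activity names into groups connected by one-to-one edges. */
    public static Map<String, Set<String>> group(Map<String, List<String>> oneToOneEdges) {
        Map<String, Set<String>> groupOf = new HashMap<String, Set<String>>();
        for (String start : oneToOneEdges.keySet()) {
            if (groupOf.containsKey(start)) {
                continue;
            }
            Set<String> group = new HashSet<String>();
            Queue<String> queue = new LinkedList<String>();
            group.add(start);
            queue.add(start);
            groupOf.put(start, group);
            while (!queue.isEmpty()) {
                List<String> outs = oneToOneEdges.get(queue.poll());
                if (outs == null) {
                    continue;
                }
                for (String next : outs) {
                    Set<String> existing = groupOf.get(next);
                    if (existing == null) {
                        /* expand the group along the one-to-one edge */
                        group.add(next);
                        queue.add(next);
                        groupOf.put(next, group);
                    } else if (existing != group) {
                        /* swallow the previously formed group */
                        for (String member : existing) {
                            group.add(member);
                            groupOf.put(member, group);
                        }
                    }
                }
            }
        }
        return groupOf;
    }

    public static void main(String[] args) {
        Map<String, List<String>> edges = new HashMap<String, List<String>>();
        edges.put("A", Arrays.asList("B"));
        edges.put("B", Arrays.asList("C"));
        edges.put("D", Arrays.asList("A"));
        System.out.println(group(edges).get("D")); // one group containing A, B, C, D
    }
}
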
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/rewriter/OneToOneConnectedActivityCluster.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/rewriter/OneToOneConnectedActivityCluster.java
new file mode 100644
index 0000000..07b7ffc
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/rewriter/OneToOneConnectedActivityCluster.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.api.rewriter;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.lang3.tuple.Pair;
+
+import edu.uci.ics.hyracks.api.dataflow.ActivityId;
+import edu.uci.ics.hyracks.api.job.ActivityCluster;
+import edu.uci.ics.hyracks.api.job.ActivityClusterGraph;
+import edu.uci.ics.hyracks.api.job.ActivityClusterId;
+
+/**
+ * All the connectors in a OneToOneConnectedActivityCluster are OneToOneConnectorDescriptors.
+ * 
+ * @author yingyib
+ */
+public class OneToOneConnectedActivityCluster extends ActivityCluster {
+
+    private static final long serialVersionUID = 1L;
+
+    protected final Map<Integer, Pair<ActivityId, Integer>> clusterInputIndexMap = new HashMap<Integer, Pair<ActivityId, Integer>>();
+    protected final Map<Integer, Pair<ActivityId, Integer>> clusterOutputIndexMap = new HashMap<Integer, Pair<ActivityId, Integer>>();
+    protected final Map<Pair<ActivityId, Integer>, Integer> invertedClusterOutputIndexMap = new HashMap<Pair<ActivityId, Integer>, Integer>();
+    protected final Map<Pair<ActivityId, Integer>, Integer> invertedClusterInputIndexMap = new HashMap<Pair<ActivityId, Integer>, Integer>();
+
+    public OneToOneConnectedActivityCluster(ActivityClusterGraph acg, ActivityClusterId id) {
+        super(acg, id);
+    }
+
+    /**
+     * Set up the mapping of the cluster's output channel to an internal activity and its output channel
+     * 
+     * @param clusterOutputIndex
+     *            the output channel index for the cluster
+     * @param activityId
+     *            the id of the internal activity which produces the corresponding output
+     * @param activityOutputIndex
+     *            the output channel index of the internal activity which corresponds to the output channel of the cluster of activities
+     */
+    public void setClusterOutputIndex(int clusterOutputIndex, ActivityId activityId, int activityOutputIndex) {
+        clusterOutputIndexMap.put(clusterOutputIndex, Pair.of(activityId, activityOutputIndex));
+        invertedClusterOutputIndexMap.put(Pair.of(activityId, activityOutputIndex), clusterOutputIndex);
+    }
+
+    /**
+     * Get the internal activity and its output channel corresponding to a cluster output channel
+     * 
+     * @param clusterOutputIndex
+     *            the output channel index for the cluster
+     * @return a pair containing the activity id of the corresponding internal activity and the output channel index
+     */
+    public Pair<ActivityId, Integer> getActivityIdOutputIndex(int clusterOutputIndex) {
+        return clusterOutputIndexMap.get(clusterOutputIndex);
+    }
+
+    /**
+     * Set up the mapping of the cluster's input channel to an internal activity and its input channel
+     * 
+     * @param clusterInputIndex
+     *            the input channel index for the cluster
+     * @param activityId
+     *            the id of the internal activity which consumes the corresponding input
+     * @param activityInputIndex
+     *            the input channel index of the internal activity which corresponds to the input channel of the cluster of activities
+     */
+    public void setClusterInputIndex(int clusterInputIndex, ActivityId activityId, int activityInputIndex) {
+        clusterInputIndexMap.put(clusterInputIndex, Pair.of(activityId, activityInputIndex));
+        invertedClusterInputIndexMap.put(Pair.of(activityId, activityInputIndex), clusterInputIndex);
+    }
+
+    /**
+     * Get the internal activity and its input channel corresponding to a cluster input channel
+     * 
+     * @param clusterInputIndex
+     *            the input channel index for the cluster
+     * @return a pair containing the activity id of the corresponding internal activity and the input channel index
+     */
+    public Pair<ActivityId, Integer> getActivityIdInputIndex(int clusterInputIndex) {
+        return clusterInputIndexMap.get(clusterInputIndex);
+    }
+
+    /**
+     * Get the cluster input channel of an input-boundary activity and its input channel
+     * 
+     * @param activityInputChannel
+     *            the input-boundary activity and its input channel
+     * @return the cluster input channel
+     */
+    public int getClusterInputIndex(Pair<ActivityId, Integer> activityInputChannel) {
+        Integer channel = invertedClusterInputIndexMap.get(activityInputChannel);
+        return channel == null ? -1 : channel;
+    }
+
+    /**
+     * Get the cluster output channel of an output-boundary activity and its output channel
+     * 
+     * @param activityOutputChannel
+     *            the output-boundary activity and its output channel
+     * @return the cluster output channel
+     */
+    public int getClusterOutputIndex(Pair<ActivityId, Integer> activityOutputChannel) {
+        Integer channel = invertedClusterOutputIndexMap.get(activityOutputChannel);
+        return channel == null ? -1 : channel;
+    }
+
+}
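
The forward and inverted maps above maintain a simple invariant: registering a boundary channel once makes lookups total in both directions, with -1 as the "not a boundary channel" sentinel. A small standalone sketch of that invariant (hypothetical class; it assumes commons-lang3 on the classpath, as hyracks-api does):

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang3.tuple.Pair;

public class PortMappingSketch {
    private final Map<Integer, Pair<String, Integer>> outputIndexMap = new HashMap<Integer, Pair<String, Integer>>();
    private final Map<Pair<String, Integer>, Integer> invertedOutputIndexMap = new HashMap<Pair<String, Integer>, Integer>();

    public void setClusterOutputIndex(int clusterPort, String activityId, int activityPort) {
        /* register the mapping in both directions, as the class above does */
        outputIndexMap.put(clusterPort, Pair.of(activityId, activityPort));
        invertedOutputIndexMap.put(Pair.of(activityId, activityPort), clusterPort);
    }

    public Pair<String, Integer> getActivityIdOutputIndex(int clusterPort) {
        return outputIndexMap.get(clusterPort);
    }

    public int getClusterOutputIndex(String activityId, int activityPort) {
        Integer port = invertedOutputIndexMap.get(Pair.of(activityId, activityPort));
        return port == null ? -1 : port; // -1: not an output-boundary channel
    }

    public static void main(String[] args) {
        PortMappingSketch sketch = new PortMappingSketch();
        sketch.setClusterOutputIndex(0, "ANID:2", 1);
        System.out.println(sketch.getActivityIdOutputIndex(0)); // (ANID:2,1)
        System.out.println(sketch.getClusterOutputIndex("ANID:2", 1)); // 0
        System.out.println(sketch.getClusterOutputIndex("ANID:3", 0)); // -1
    }
}
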
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/rewriter/runtime/SuperActivity.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/rewriter/runtime/SuperActivity.java
new file mode 100644
index 0000000..734ff85
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/rewriter/runtime/SuperActivity.java
@@ -0,0 +1,177 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.api.rewriter.runtime;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.commons.lang3.tuple.Pair;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.ActivityId;
+import edu.uci.ics.hyracks.api.dataflow.IActivity;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.ActivityCluster;
+import edu.uci.ics.hyracks.api.job.ActivityClusterGraph;
+import edu.uci.ics.hyracks.api.job.ActivityClusterId;
+import edu.uci.ics.hyracks.api.rewriter.OneToOneConnectedActivityCluster;
+
+/**
+ * This class can be used to execute a DAG of activities inside which
+ * there are only one-to-one connectors.
+ * 
+ * @author yingyib
+ */
+public class SuperActivity extends OneToOneConnectedActivityCluster implements IActivity {
+    private static final long serialVersionUID = 1L;
+    private final ActivityId activityId;
+
+    public SuperActivity(ActivityClusterGraph acg, ActivityClusterId id, ActivityId activityId) {
+        super(acg, id);
+        this.activityId = activityId;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            final IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
+            throws HyracksDataException {
+        final Map<ActivityId, IActivity> startActivities = new HashMap<ActivityId, IActivity>();
+        Map<ActivityId, IActivity> activities = getActivityMap();
+        for (Entry<ActivityId, IActivity> entry : activities.entrySet()) {
+            /**
+             * extract start activities
+             */
+            List<IConnectorDescriptor> conns = getActivityInputMap().get(entry.getKey());
+            if (conns == null || conns.size() == 0) {
+                startActivities.put(entry.getKey(), entry.getValue());
+            }
+        }
+
+        /**
+         * wrap a RecordDescriptorProvider for the super activity
+         */
+        IRecordDescriptorProvider wrappedRecDescProvider = new IRecordDescriptorProvider() {
+
+            @Override
+            public RecordDescriptor getInputRecordDescriptor(ActivityId aid, int inputIndex) {
+                if (startActivities.get(aid) != null) {
+                    /**
+                     * if the activity is a start (input boundary) activity
+                     */
+                    int superActivityInputChannel = SuperActivity.this.getClusterInputIndex(Pair.of(aid, inputIndex));
+                    if (superActivityInputChannel >= 0) {
+                        return recordDescProvider.getInputRecordDescriptor(activityId, superActivityInputChannel);
+                    }
+                }
+                if (SuperActivity.this.getActivityMap().get(aid) != null) {
+                    /**
+                     * if the activity is an internal activity of the super activity
+                     */
+                    IConnectorDescriptor conn = getActivityInputMap().get(aid).get(inputIndex);
+                    return getConnectorRecordDescriptorMap().get(conn.getConnectorId());
+                }
+
+                /**
+                 * the remaining case: the activity resides in another SuperActivity
+                 */
+                ActivityClusterGraph acg = SuperActivity.this.getActivityClusterGraph();
+                for (Entry<ActivityClusterId, ActivityCluster> entry : acg.getActivityClusterMap().entrySet()) {
+                    ActivityCluster ac = entry.getValue();
+                    for (Entry<ActivityId, IActivity> saEntry : ac.getActivityMap().entrySet()) {
+                        SuperActivity sa = (SuperActivity) saEntry.getValue();
+                        if (sa.getActivityMap().get(aid) != null) {
+                            List<IConnectorDescriptor> conns = sa.getActivityInputMap().get(aid);
+                            if (conns != null && conns.size() > inputIndex) {
+                                IConnectorDescriptor conn = conns.get(inputIndex);
+                                return sa.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
+                            } else {
+                                int superActivityInputChannel = sa.getClusterInputIndex(Pair.of(aid, inputIndex));
+                                if (superActivityInputChannel >= 0) {
+                                    return recordDescProvider.getInputRecordDescriptor(sa.getActivityId(),
+                                            superActivityInputChannel);
+                                }
+                            }
+                        }
+                    }
+                }
+                return null;
+            }
+
+            @Override
+            public RecordDescriptor getOutputRecordDescriptor(ActivityId aid, int outputIndex) {
+                /**
+                 * if the activity is an output-boundary activity
+                 */
+                int superActivityOutputChannel = SuperActivity.this.getClusterOutputIndex(Pair.of(aid, outputIndex));
+                if (superActivityOutputChannel >= 0) {
+                    return recordDescProvider.getOutputRecordDescriptor(activityId, superActivityOutputChannel);
+                }
+
+                if (SuperActivity.this.getActivityMap().get(aid) != null) {
+                    /**
+                     * if the activity is an internal activity of the super activity
+                     */
+                    IConnectorDescriptor conn = getActivityOutputMap().get(aid).get(outputIndex);
+                    return getConnectorRecordDescriptorMap().get(conn.getConnectorId());
+                }
+
+                /**
+                 * the remaining case: the activity resides in another SuperActivity
+                 */
+                ActivityClusterGraph acg = SuperActivity.this.getActivityClusterGraph();
+                for (Entry<ActivityClusterId, ActivityCluster> entry : acg.getActivityClusterMap().entrySet()) {
+                    ActivityCluster ac = entry.getValue();
+                    for (Entry<ActivityId, IActivity> saEntry : ac.getActivityMap().entrySet()) {
+                        SuperActivity sa = (SuperActivity) saEntry.getValue();
+                        if (sa.getActivityMap().get(aid) != null) {
+                            List<IConnectorDescriptor> conns = sa.getActivityOutputMap().get(aid);
+                            if (conns != null && conns.size() > outputIndex) {
+                                IConnectorDescriptor conn = conns.get(outputIndex);
+                                return sa.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
+                            } else {
+                                superActivityOutputChannel = sa.getClusterOutputIndex(Pair.of(aid, outputIndex));
+                                if (superActivityOutputChannel >= 0) {
+                                    return recordDescProvider.getOutputRecordDescriptor(sa.getActivityId(),
+                                            superActivityOutputChannel);
+                                }
+                            }
+                        }
+                    }
+                }
+                return null;
+            }
+
+        };
+        return new SuperActivityOperatorNodePushable(this, startActivities, ctx, wrappedRecDescProvider, partition,
+                nPartitions);
+    }
+
+    @Override
+    public ActivityId getActivityId() {
+        return activityId;
+    }
+
+    @Override
+    public String toString() {
+        return getActivityMap().values().toString();
+    }
+}
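
The wrapped IRecordDescriptorProvider above resolves a channel in three steps: boundary channels are remapped to the super activity's own channel numbering, interior channels resolve against the cluster's connectors, and anything else is looked up in the other SuperActivities. A simplified standalone analogue of the first two steps (hypothetical types, not the Hyracks interfaces):

import java.util.HashMap;
import java.util.Map;

interface DescriptorProvider {
    String getInputDescriptor(String activityId, int inputIndex);
}

public class WrappedProviderSketch implements DescriptorProvider {
    private final DescriptorProvider outer; // the job-level provider
    private final String superActivityId;
    /* "activity:port" -> the super activity's cluster-level port */
    private final Map<String, Integer> boundaryInputs = new HashMap<String, Integer>();
    /* "activity:port" -> descriptor carried by an interior one-to-one connector */
    private final Map<String, String> interiorInputs = new HashMap<String, String>();

    public WrappedProviderSketch(DescriptorProvider outer, String superActivityId) {
        this.outer = outer;
        this.superActivityId = superActivityId;
    }

    public void addBoundaryInput(String activityId, int port, int clusterPort) {
        boundaryInputs.put(activityId + ":" + port, clusterPort);
    }

    public void addInteriorInput(String activityId, int port, String descriptor) {
        interiorInputs.put(activityId + ":" + port, descriptor);
    }

    @Override
    public String getInputDescriptor(String activityId, int inputIndex) {
        Integer clusterPort = boundaryInputs.get(activityId + ":" + inputIndex);
        if (clusterPort != null) {
            /* boundary channel: ask the outer provider using the cluster-level port */
            return outer.getInputDescriptor(superActivityId, clusterPort);
        }
        /* interior channel: resolved from the connector inside the cluster */
        return interiorInputs.get(activityId + ":" + inputIndex);
    }
}
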
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/rewriter/runtime/SuperActivityOperatorNodePushable.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/rewriter/runtime/SuperActivityOperatorNodePushable.java
new file mode 100644
index 0000000..7d50fa0
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/rewriter/runtime/SuperActivityOperatorNodePushable.java
@@ -0,0 +1,195 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.api.rewriter.runtime;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Queue;
+
+import org.apache.commons.lang3.tuple.Pair;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.ActivityId;
+import edu.uci.ics.hyracks.api.dataflow.IActivity;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * The runtime of a SuperActivity, which internally executes a DAG of one-to-one
+ * connected activities in a single thread.
+ * 
+ * @author yingyib
+ */
+public class SuperActivityOperatorNodePushable implements IOperatorNodePushable {
+    private final Map<ActivityId, IOperatorNodePushable> operatorNodePushables = new HashMap<ActivityId, IOperatorNodePushable>();
+    private final List<IOperatorNodePushable> operatorNodePushablesBFSOrder = new ArrayList<IOperatorNodePushable>();
+    private final Map<ActivityId, IActivity> startActivities;
+    private final SuperActivity parent;
+    private final IHyracksTaskContext ctx;
+    private final IRecordDescriptorProvider recordDescProvider;
+    private final int partition;
+    private final int nPartitions;
+    private int inputArity = 0;
+
+    public SuperActivityOperatorNodePushable(SuperActivity parent, Map<ActivityId, IActivity> startActivities,
+            IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+        this.parent = parent;
+        this.startActivities = startActivities;
+        this.ctx = ctx;
+        this.recordDescProvider = recordDescProvider;
+        this.partition = partition;
+        this.nPartitions = nPartitions;
+
+        /**
+         * initialize the writer relationships for the internal DAG of
+         * operator node pushables
+         */
+        try {
+            init();
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    @Override
+    public void initialize() throws HyracksDataException {
+        /**
+         * initialize operator node pushables in the BFS order
+         */
+        for (IOperatorNodePushable op : operatorNodePushablesBFSOrder) {
+            op.initialize();
+        }
+    }
+
+    public void init() throws HyracksDataException {
+        Map<ActivityId, IOperatorNodePushable> startOperatorNodePushables = new HashMap<ActivityId, IOperatorNodePushable>();
+        Queue<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> childQueue = new LinkedList<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>>();
+        List<IConnectorDescriptor> outputConnectors = null;
+
+        /**
+         * Set up the source operators
+         */
+        for (Entry<ActivityId, IActivity> entry : startActivities.entrySet()) {
+            IOperatorNodePushable opPushable = entry.getValue().createPushRuntime(ctx, recordDescProvider, partition,
+                    nPartitions);
+            startOperatorNodePushables.put(entry.getKey(), opPushable);
+            operatorNodePushablesBFSOrder.add(opPushable);
+            operatorNodePushables.put(entry.getKey(), opPushable);
+            inputArity += opPushable.getInputArity();
+            outputConnectors = parent.getActivityOutputMap().get(entry.getKey());
+            if (outputConnectors != null) {
+                for (IConnectorDescriptor conn : outputConnectors) {
+                    childQueue.add(parent.getConnectorActivityMap().get(conn.getConnectorId()));
+                }
+            }
+        }
+
+        /**
+         * Clear the stale reference so that the last start activity's children
+         * are not enqueued a second time at the top of the loop below.
+         */
+        outputConnectors = null;
+
+        /**
+         * Use BFS (breadth-first search) to construct the runtime execution
+         * DAG.
+         */
+        while (childQueue.size() > 0) {
+            /**
+             * expand the executing activities further downstream
+             */
+            if (outputConnectors != null && outputConnectors.size() > 0) {
+                for (IConnectorDescriptor conn : outputConnectors) {
+                    if (conn != null) {
+                        childQueue.add(parent.getConnectorActivityMap().get(conn.getConnectorId()));
+                    }
+                }
+            }
+
+            /**
+             * construct the source to destination information
+             */
+            Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> channel = childQueue.poll();
+            ActivityId sourceId = channel.getLeft().getLeft().getActivityId();
+            int outputChannel = channel.getLeft().getRight();
+            ActivityId destId = channel.getRight().getLeft().getActivityId();
+            int inputChannel = channel.getRight().getRight();
+            IOperatorNodePushable sourceOp = operatorNodePushables.get(sourceId);
+            IOperatorNodePushable destOp = operatorNodePushables.get(destId);
+            if (destOp == null) {
+                destOp = channel.getRight().getLeft()
+                        .createPushRuntime(ctx, recordDescProvider, partition, nPartitions);
+                operatorNodePushablesBFSOrder.add(destOp);
+                operatorNodePushables.put(destId, destOp);
+            }
+
+            /**
+             * construct the dataflow connection from a producer to a consumer
+             */
+            sourceOp.setOutputFrameWriter(outputChannel, destOp.getInputFrameWriter(inputChannel),
+                    recordDescProvider.getInputRecordDescriptor(destId, inputChannel));
+
+            /**
+             * traverse to the child of the current activity
+             */
+            outputConnectors = parent.getActivityOutputMap().get(destId);
+        }
+    }
+
+    @Override
+    public void deinitialize() throws HyracksDataException {
+        /**
+         * de-initialize operator node pushables
+         */
+        for (IOperatorNodePushable op : operatorNodePushablesBFSOrder) {
+            op.deinitialize();
+        }
+    }
+
+    @Override
+    public int getInputArity() {
+        return inputArity;
+    }
+
+    @Override
+    public void setOutputFrameWriter(int clusterOutputIndex, IFrameWriter writer, RecordDescriptor recordDesc) {
+        /**
+         * set the right output frame writer
+         */
+        Pair<ActivityId, Integer> activityIdOutputIndex = parent.getActivityIdOutputIndex(clusterOutputIndex);
+        IOperatorNodePushable opPushable = operatorNodePushables.get(activityIdOutputIndex.getLeft());
+        opPushable.setOutputFrameWriter(activityIdOutputIndex.getRight(), writer, recordDesc);
+    }
+
+    @Override
+    public IFrameWriter getInputFrameWriter(final int index) {
+        /**
+         * get the right IFrameWriter from the cluster input index
+         */
+        Pair<ActivityId, Integer> activityIdInputIndex = parent.getActivityIdInputIndex(index);
+        IOperatorNodePushable operatorNodePushable = operatorNodePushables.get(activityIdInputIndex.getLeft());
+        IFrameWriter writer = operatorNodePushable.getInputFrameWriter(activityIdInputIndex.getRight());
+        return writer;
+    }
+
+    @Override
+    public String getDisplayName() {
+        return "Super Activity " + parent.getActivityMap().values().toString();
+    }
+
+}
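
The core of init() is the wiring step: the consumer's input frame writer is handed to the producer's output port, so a push on the producer flows straight through in a single thread. A minimal sketch with stand-in types (not the Hyracks IFrameWriter/IOperatorNodePushable interfaces):

public class WiringSketch {
    interface FrameWriter {
        void nextFrame(String frame);
    }

    static class Op {
        private final String name;
        private FrameWriter output;

        Op(String name) {
            this.name = name;
        }

        FrameWriter getInputFrameWriter() {
            return new FrameWriter() {
                @Override
                public void nextFrame(String frame) {
                    System.out.println(name + " received " + frame);
                }
            };
        }

        void setOutputFrameWriter(FrameWriter writer) {
            output = writer;
        }

        void push(String frame) {
            output.nextFrame(frame);
        }
    }

    public static void main(String[] args) {
        Op source = new Op("source");
        Op sink = new Op("sink");
        /* the wiring step: hand the consumer's input writer to the producer */
        source.setOutputFrameWriter(sink.getInputFrameWriter());
        source.push("frame-1"); // prints: sink received frame-1
    }
}
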
diff --git a/hyracks/hyracks-cli/pom.xml b/hyracks/hyracks-cli/pom.xml
index 3933be0..991ce2a 100644
--- a/hyracks/hyracks-cli/pom.xml
+++ b/hyracks/hyracks-cli/pom.xml
@@ -18,8 +18,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
       <plugin>
@@ -41,6 +42,7 @@
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
         <executions>
           <execution>
             <configuration>
diff --git a/hyracks/hyracks-client/pom.xml b/hyracks/hyracks-client/pom.xml
new file mode 100644
index 0000000..854a009
--- /dev/null
+++ b/hyracks/hyracks-client/pom.xml
@@ -0,0 +1,46 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-client</artifactId>
+  <name>hyracks-client</name>
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <dependency>
+      <groupId>edu.uci.ics.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>0.2.3-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>edu.uci.ics.hyracks</groupId>
+      <artifactId>hyracks-net</artifactId>
+      <version>0.2.3-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>edu.uci.ics.hyracks</groupId>
+      <artifactId>hyracks-comm</artifactId>
+      <version>0.2.3-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>edu.uci.ics.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>0.2.3-SNAPSHOT</version>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/DatasetClientContext.java b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/DatasetClientContext.java
new file mode 100644
index 0000000..8be4a8c
--- /dev/null
+++ b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/DatasetClientContext.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.client.dataset;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksCommonContext;
+import edu.uci.ics.hyracks.api.io.IIOManager;
+
+public class DatasetClientContext implements IHyracksCommonContext {
+    private final int frameSize;
+
+    public DatasetClientContext(int frameSize) {
+        this.frameSize = frameSize;
+    }
+
+    @Override
+    public int getFrameSize() {
+        return frameSize;
+    }
+
+    @Override
+    public IIOManager getIOManager() {
+        return null;
+    }
+
+    @Override
+    public ByteBuffer allocateFrame() {
+        return ByteBuffer.allocate(frameSize);
+    }
+
+}
diff --git a/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDataset.java b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDataset.java
new file mode 100644
index 0000000..6866e46
--- /dev/null
+++ b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDataset.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.client.dataset;
+
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetDirectoryServiceConnection;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetReader;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.client.net.ClientNetworkManager;
+
+public class HyracksDataset implements IHyracksDataset {
+    private final IHyracksDatasetDirectoryServiceConnection datasetDirectoryServiceConnection;
+
+    private final ClientNetworkManager netManager;
+
+    private final DatasetClientContext datasetClientCtx;
+
+    public HyracksDataset(IHyracksClientConnection hcc, int frameSize, int nReaders) throws Exception {
+        NetworkAddress ddsAddress = hcc.getDatasetDirectoryServiceInfo();
+        datasetDirectoryServiceConnection = new HyracksDatasetDirectoryServiceConnection(new String(
+                ddsAddress.getIpAddress()), ddsAddress.getPort());
+
+        netManager = new ClientNetworkManager(nReaders);
+        netManager.start();
+
+        datasetClientCtx = new DatasetClientContext(frameSize);
+    }
+
+    @Override
+    public IHyracksDatasetReader createReader(JobId jobId, ResultSetId resultSetId) throws HyracksDataException {
+        IHyracksDatasetReader reader = null;
+        try {
+            reader = new HyracksDatasetReader(datasetDirectoryServiceConnection, netManager, datasetClientCtx, jobId,
+                    resultSetId);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+        return reader;
+    }
+}
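
A client would combine these pieces roughly as follows. This is a sketch only: it assumes a running cluster reachable through HyracksConnection, plus pre-existing job and result-set ids; the host, port, frame size, and ids are all placeholders, not part of this change.

import java.nio.ByteBuffer;

import edu.uci.ics.hyracks.api.client.HyracksConnection;
import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetReader;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.client.dataset.HyracksDataset;

public class ReadResultsSketch {
    public static void main(String[] args) throws Exception {
        /* host, port, frame size, and ids below are placeholder assumptions */
        IHyracksClientConnection hcc = new HyracksConnection("localhost", 1098);
        IHyracksDataset dataset = new HyracksDataset(hcc, 32768, 1);
        IHyracksDatasetReader reader = dataset.createReader(new JobId(0), new ResultSetId(1));
        ByteBuffer frame = ByteBuffer.allocate(32768);
        while (reader.read(frame) > 0) {
            /* consume the frame contents here */
            frame.clear();
        }
    }
}
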
diff --git a/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetDirectoryServiceConnection.java b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetDirectoryServiceConnection.java
new file mode 100644
index 0000000..095fd7d
--- /dev/null
+++ b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetDirectoryServiceConnection.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.client.dataset;
+
+import java.net.InetSocketAddress;
+
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord;
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetDirectoryServiceConnection;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetDirectoryServiceInterface;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.ipc.api.IIPCHandle;
+import edu.uci.ics.hyracks.ipc.api.RPCInterface;
+import edu.uci.ics.hyracks.ipc.impl.IPCSystem;
+import edu.uci.ics.hyracks.ipc.impl.JavaSerializationBasedPayloadSerializerDeserializer;
+
+//TODO(madhusudancs): Should this implementation be moved to edu.uci.ics.hyracks.client?
+public class HyracksDatasetDirectoryServiceConnection implements IHyracksDatasetDirectoryServiceConnection {
+    private final IPCSystem ipc;
+    private final IHyracksDatasetDirectoryServiceInterface ddsi;
+
+    public HyracksDatasetDirectoryServiceConnection(String ddsHost, int ddsPort) throws Exception {
+        RPCInterface rpci = new RPCInterface();
+        ipc = new IPCSystem(new InetSocketAddress(0), rpci, new JavaSerializationBasedPayloadSerializerDeserializer());
+        ipc.start();
+        IIPCHandle ddsIpchandle = ipc.getHandle(new InetSocketAddress(ddsHost, ddsPort));
+        this.ddsi = new HyracksDatasetDirectoryServiceInterfaceRemoteProxy(ddsIpchandle, rpci);
+    }
+
+    @Override
+    public Status getDatasetResultStatus(JobId jobId, ResultSetId rsId) throws Exception {
+        return ddsi.getDatasetResultStatus(jobId, rsId);
+    }
+
+    @Override
+    public DatasetDirectoryRecord[] getDatasetResultLocations(JobId jobId, ResultSetId rsId,
+            DatasetDirectoryRecord[] knownRecords) throws Exception {
+        return ddsi.getDatasetResultLocations(jobId, rsId, knownRecords);
+    }
+}
diff --git a/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetDirectoryServiceInterfaceRemoteProxy.java b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetDirectoryServiceInterfaceRemoteProxy.java
new file mode 100644
index 0000000..47cdf97
--- /dev/null
+++ b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetDirectoryServiceInterfaceRemoteProxy.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.client.dataset;
+
+import edu.uci.ics.hyracks.api.client.HyracksClientInterfaceFunctions;
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord;
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetDirectoryServiceInterface;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.ipc.api.IIPCHandle;
+import edu.uci.ics.hyracks.ipc.api.RPCInterface;
+
+//TODO(madhusudancs): Should this implementation be moved to edu.uci.ics.hyracks.client?
+public class HyracksDatasetDirectoryServiceInterfaceRemoteProxy implements IHyracksDatasetDirectoryServiceInterface {
+    private final IIPCHandle ipcHandle;
+
+    private final RPCInterface rpci;
+
+    public HyracksDatasetDirectoryServiceInterfaceRemoteProxy(IIPCHandle ipcHandle, RPCInterface rpci) {
+        this.ipcHandle = ipcHandle;
+        this.rpci = rpci;
+    }
+
+    @Override
+    public Status getDatasetResultStatus(JobId jobId, ResultSetId rsId) throws Exception {
+        HyracksClientInterfaceFunctions.GetDatasetResultStatusFunction gdrlf = new HyracksClientInterfaceFunctions.GetDatasetResultStatusFunction(
+                jobId, rsId);
+        return (Status) rpci.call(ipcHandle, gdrlf);
+    }
+
+    @Override
+    public DatasetDirectoryRecord[] getDatasetResultLocations(JobId jobId, ResultSetId rsId,
+            DatasetDirectoryRecord[] knownRecords) throws Exception {
+        HyracksClientInterfaceFunctions.GetDatasetResultLocationsFunction gdrlf = new HyracksClientInterfaceFunctions.GetDatasetResultLocationsFunction(
+                jobId, rsId, knownRecords);
+        return (DatasetDirectoryRecord[]) rpci.call(ipcHandle, gdrlf);
+    }
+}
diff --git a/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetReader.java b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetReader.java
new file mode 100644
index 0000000..78bcf20
--- /dev/null
+++ b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetReader.java
@@ -0,0 +1,255 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.client.dataset;
+
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+import java.net.UnknownHostException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.channels.IInputChannel;
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord;
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.dataset.IDatasetInputChannelMonitor;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetDirectoryServiceConnection;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetReader;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.client.net.ClientNetworkManager;
+import edu.uci.ics.hyracks.comm.channels.DatasetNetworkInputChannel;
+
+// TODO(madhusudancs): Should this implementation be moved to edu.uci.ics.hyracks.client?
+public class HyracksDatasetReader implements IHyracksDatasetReader {
+    private static final Logger LOGGER = Logger.getLogger(HyracksDatasetReader.class.getName());
+
+    private final IHyracksDatasetDirectoryServiceConnection datasetDirectoryServiceConnection;
+
+    private final ClientNetworkManager netManager;
+
+    private final DatasetClientContext datasetClientCtx;
+
+    private JobId jobId;
+
+    private ResultSetId resultSetId;
+
+    private DatasetDirectoryRecord[] knownRecords;
+
+    private IDatasetInputChannelMonitor[] monitors;
+
+    private int lastReadPartition;
+
+    private IDatasetInputChannelMonitor lastMonitor;
+
+    private DatasetNetworkInputChannel resultChannel;
+
+    private static final int NUM_READ_BUFFERS = 1;
+
+    public HyracksDatasetReader(IHyracksDatasetDirectoryServiceConnection datasetDirectoryServiceConnection,
+            ClientNetworkManager netManager, DatasetClientContext datasetClientCtx, JobId jobId, ResultSetId resultSetId)
+            throws Exception {
+        this.datasetDirectoryServiceConnection = datasetDirectoryServiceConnection;
+        this.netManager = netManager;
+        this.datasetClientCtx = datasetClientCtx;
+        this.jobId = jobId;
+        this.resultSetId = resultSetId;
+        knownRecords = null;
+        monitors = null;
+        lastReadPartition = -1;
+        lastMonitor = null;
+        resultChannel = null;
+    }
+
+    @Override
+    public Status getResultStatus() {
+        Status status = null;
+        try {
+            status = datasetDirectoryServiceConnection.getDatasetResultStatus(jobId, resultSetId);
+        } catch (Exception e) {
+            // TODO(madhusudancs): Decide what to do in case of error
+        }
+        return status;
+    }
+
+    @Override
+    public int read(ByteBuffer buffer) throws HyracksDataException {
+        ByteBuffer readBuffer;
+        int readSize = 0;
+
+        if (lastReadPartition == -1) {
+            while (knownRecords == null || knownRecords[0] == null) {
+                try {
+                    knownRecords = datasetDirectoryServiceConnection.getDatasetResultLocations(jobId, resultSetId,
+                            knownRecords);
+                    lastReadPartition = 0;
+                    resultChannel = new DatasetNetworkInputChannel(netManager,
+                            getSocketAddress(knownRecords[lastReadPartition]), jobId, lastReadPartition,
+                            NUM_READ_BUFFERS);
+                    lastMonitor = getMonitor(lastReadPartition);
+                    resultChannel.open(datasetClientCtx);
+                    resultChannel.registerMonitor(lastMonitor);
+                } catch (HyracksException e) {
+                    throw new HyracksDataException(e);
+                } catch (UnknownHostException e) {
+                    throw new HyracksDataException(e);
+                } catch (Exception e) {
+                    // Do nothing here.
+                }
+            }
+        }
+
+        while (readSize <= 0 && !((lastReadPartition == knownRecords.length - 1) && (lastMonitor.eosReached()))) {
+            synchronized (lastMonitor) {
+                while (lastMonitor.getNFramesAvailable() <= 0 && !lastMonitor.eosReached()) {
+                    try {
+                        lastMonitor.wait();
+                    } catch (InterruptedException e) {
+                        throw new HyracksDataException(e);
+                    }
+                }
+            }
+
+            if (lastMonitor.getNFramesAvailable() <= 0 && lastMonitor.eosReached()) {
+                knownRecords[lastReadPartition].readEOS();
+                if ((lastReadPartition == knownRecords.length - 1)) {
+                    break;
+                } else {
+                    try {
+                        lastReadPartition++;
+                        while (knownRecords[lastReadPartition] == null) {
+                            try {
+                                knownRecords = datasetDirectoryServiceConnection.getDatasetResultLocations(jobId,
+                                        resultSetId, knownRecords);
+                            } catch (Exception e) {
+                                // Do nothing here.
+                            }
+                        }
+
+                        resultChannel = new DatasetNetworkInputChannel(netManager,
+                                getSocketAddress(knownRecords[lastReadPartition]), jobId, lastReadPartition,
+                                NUM_READ_BUFFERS);
+                        lastMonitor = getMonitor(lastReadPartition);
+                        resultChannel.open(datasetClientCtx);
+                        resultChannel.registerMonitor(lastMonitor);
+                    } catch (HyracksException e) {
+                        throw new HyracksDataException(e);
+                    } catch (UnknownHostException e) {
+                        throw new HyracksDataException(e);
+                    }
+                }
+            } else {
+                readBuffer = resultChannel.getNextBuffer();
+                lastMonitor.notifyFrameRead();
+                if (readBuffer != null) {
+                    buffer.put(readBuffer);
+                    buffer.flip();
+                    readSize = buffer.limit();
+                    resultChannel.recycleBuffer(readBuffer);
+                }
+            }
+        }
+
+        return readSize;
+    }
+
+    private boolean nullExists(DatasetDirectoryRecord[] locations) {
+        if (locations == null) {
+            return true;
+        }
+        for (int i = 0; i < locations.length; i++) {
+            if (locations[i] == null) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    private SocketAddress getSocketAddress(DatasetDirectoryRecord addr) throws UnknownHostException {
+        NetworkAddress netAddr = addr.getNetworkAddress();
+        return new InetSocketAddress(InetAddress.getByAddress(netAddr.getIpAddress()), netAddr.getPort());
+    }
+
+    private IDatasetInputChannelMonitor getMonitor(int partition) throws HyracksException {
+        if (knownRecords == null || knownRecords[partition] == null) {
+            throw new HyracksException("Accessing monitors before the obtaining the corresponding addresses.");
+        }
+        if (monitors == null) {
+            monitors = new DatasetInputChannelMonitor[knownRecords.length];
+        }
+        if (monitors[partition] == null) {
+            monitors[partition] = new DatasetInputChannelMonitor();
+        }
+        return monitors[partition];
+    }
+
+    private class DatasetInputChannelMonitor implements IDatasetInputChannelMonitor {
+        private final AtomicInteger nAvailableFrames;
+
+        private final AtomicBoolean eos;
+
+        private final AtomicBoolean failed;
+
+        public DatasetInputChannelMonitor() {
+            nAvailableFrames = new AtomicInteger(0);
+            eos = new AtomicBoolean(false);
+            failed = new AtomicBoolean(false);
+        }
+
+        @Override
+        public synchronized void notifyFailure(IInputChannel channel) {
+            failed.set(true);
+            notifyAll();
+        }
+
+        @Override
+        public synchronized void notifyDataAvailability(IInputChannel channel, int nFrames) {
+            nAvailableFrames.addAndGet(nFrames);
+            notifyAll();
+        }
+
+        @Override
+        public synchronized void notifyEndOfStream(IInputChannel channel) {
+            eos.set(true);
+            notifyAll();
+        }
+
+        @Override
+        public synchronized boolean eosReached() {
+            return eos.get();
+        }
+
+        @Override
+        public synchronized boolean failed() {
+            return failed.get();
+        }
+
+        @Override
+        public synchronized int getNFramesAvailable() {
+            return nAvailableFrames.get();
+        }
+
+        @Override
+        public synchronized void notifyFrameRead() {
+            nAvailableFrames.decrementAndGet();
+        }
+
+    }
+}
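
The reader's blocking behavior comes from the monitor's wait/notify handshake: read() waits on the monitor until frames arrive or end-of-stream is signaled, and the channel's callback threads wake it up. A minimal standalone sketch of that handshake (hypothetical class, not the IDatasetInputChannelMonitor interface):

public class FrameMonitorSketch {
    private int framesAvailable = 0;
    private boolean eos = false;

    /* called by the network side when frames arrive */
    public synchronized void notifyDataAvailability(int nFrames) {
        framesAvailable += nFrames;
        notifyAll();
    }

    /* called by the network side when the stream ends */
    public synchronized void notifyEndOfStream() {
        eos = true;
        notifyAll();
    }

    /* called by the reader; blocks until a frame is available or the stream ends */
    public synchronized boolean awaitFrame() throws InterruptedException {
        while (framesAvailable <= 0 && !eos) {
            wait();
        }
        if (framesAvailable > 0) {
            framesAvailable--;
            return true; // a frame may be consumed
        }
        return false; // end of stream
    }
}
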
diff --git a/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/net/ClientNetworkManager.java b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/net/ClientNetworkManager.java
new file mode 100644
index 0000000..7aef8b9
--- /dev/null
+++ b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/net/ClientNetworkManager.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.client.net;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+
+import edu.uci.ics.hyracks.comm.channels.IChannelConnectionFactory;
+import edu.uci.ics.hyracks.net.exceptions.NetException;
+import edu.uci.ics.hyracks.net.protocols.muxdemux.ChannelControlBlock;
+import edu.uci.ics.hyracks.net.protocols.muxdemux.MultiplexedConnection;
+import edu.uci.ics.hyracks.net.protocols.muxdemux.MuxDemux;
+import edu.uci.ics.hyracks.net.protocols.muxdemux.MuxDemuxPerformanceCounters;
+
+public class ClientNetworkManager implements IChannelConnectionFactory {
+    private static final int MAX_CONNECTION_ATTEMPTS = 5;
+
+    private final MuxDemux md;
+
+    public ClientNetworkManager(int nThreads) throws IOException {
+        /* This is a connect-only socket that does not listen for any incoming connections, so pass null
+         * for the localAddress and the listener.
+         */
+        md = new MuxDemux(null, null, nThreads, MAX_CONNECTION_ATTEMPTS);
+    }
+
+    public void start() throws IOException {
+        md.start();
+    }
+
+    public void stop() {
+
+    }
+
+    public ChannelControlBlock connect(SocketAddress remoteAddress) throws InterruptedException, NetException {
+        MultiplexedConnection mConn = md.connect((InetSocketAddress) remoteAddress);
+        return mConn.openChannel();
+    }
+
+    public MuxDemuxPerformanceCounters getPerformanceCounters() {
+        return md.getPerformanceCounters();
+    }
+}
\ No newline at end of file
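
A rough usage sketch for the manager above; the thread count and remote address are placeholders, and in practice the address would come from a NodeControllerInfo or a DatasetDirectoryRecord's NetworkAddress:

    import java.net.InetSocketAddress;
    import java.net.SocketAddress;

    import edu.uci.ics.hyracks.client.net.ClientNetworkManager;
    import edu.uci.ics.hyracks.net.protocols.muxdemux.ChannelControlBlock;

    public class ClientChannelSketch {
        public static void main(String[] args) throws Exception {
            ClientNetworkManager netManager = new ClientNetworkManager(2); // 2 muxdemux I/O threads
            netManager.start();
            // Placeholder address for illustration only.
            SocketAddress remote = new InetSocketAddress("10.0.0.1", 33000);
            ChannelControlBlock ccb = netManager.connect(remote);
            // ... install read/write buffer acceptors on ccb, as the input channels in this commit do ...
            netManager.stop();
        }
    }
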
diff --git a/hyracks/hyracks-comm/pom.xml b/hyracks/hyracks-comm/pom.xml
new file mode 100644
index 0000000..c3583699
--- /dev/null
+++ b/hyracks/hyracks-comm/pom.xml
@@ -0,0 +1,36 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-comm</artifactId>
+  <name>hyracks-comm</name>
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <dependency>
+      <groupId>edu.uci.ics.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>0.2.3-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>edu.uci.ics.hyracks</groupId>
+      <artifactId>hyracks-net</artifactId>
+      <version>0.2.3-SNAPSHOT</version>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/DatasetNetworkInputChannel.java b/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/DatasetNetworkInputChannel.java
new file mode 100644
index 0000000..fac2949
--- /dev/null
+++ b/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/DatasetNetworkInputChannel.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.comm.channels;
+
+import java.net.SocketAddress;
+import java.nio.ByteBuffer;
+import java.util.ArrayDeque;
+import java.util.Queue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.channels.IInputChannel;
+import edu.uci.ics.hyracks.api.channels.IInputChannelMonitor;
+import edu.uci.ics.hyracks.api.context.IHyracksCommonContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.net.buffers.IBufferAcceptor;
+import edu.uci.ics.hyracks.net.buffers.ICloseableBufferAcceptor;
+import edu.uci.ics.hyracks.net.protocols.muxdemux.ChannelControlBlock;
+
+public class DatasetNetworkInputChannel implements IInputChannel {
+    private static final Logger LOGGER = Logger.getLogger(DatasetNetworkInputChannel.class.getName());
+
+    static final int INITIAL_MESSAGE_SIZE = 20;
+
+    private final IChannelConnectionFactory netManager;
+
+    private final SocketAddress remoteAddress;
+
+    private final JobId jobId;
+
+    private final int partition;
+
+    private final Queue<ByteBuffer> fullQueue;
+
+    private final int nBuffers;
+
+    private ChannelControlBlock ccb;
+
+    private IInputChannelMonitor monitor;
+
+    private Object attachment;
+
+    public DatasetNetworkInputChannel(IChannelConnectionFactory netManager, SocketAddress remoteAddress, JobId jobId,
+            int partition, int nBuffers) {
+        this.netManager = netManager;
+        this.remoteAddress = remoteAddress;
+        this.jobId = jobId;
+        this.partition = partition;
+        fullQueue = new ArrayDeque<ByteBuffer>(nBuffers);
+        this.nBuffers = nBuffers;
+    }
+
+    @Override
+    public void registerMonitor(IInputChannelMonitor monitor) {
+        this.monitor = monitor;
+    }
+
+    @Override
+    public void setAttachment(Object attachment) {
+        this.attachment = attachment;
+    }
+
+    @Override
+    public Object getAttachment() {
+        return attachment;
+    }
+
+    @Override
+    public synchronized ByteBuffer getNextBuffer() {
+        return fullQueue.poll();
+    }
+
+    @Override
+    public void recycleBuffer(ByteBuffer buffer) {
+        buffer.clear();
+        ccb.getReadInterface().getEmptyBufferAcceptor().accept(buffer);
+    }
+
+    @Override
+    public void open(IHyracksCommonContext ctx) throws HyracksDataException {
+        try {
+            ccb = netManager.connect(remoteAddress);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+        ccb.getReadInterface().setFullBufferAcceptor(new ReadFullBufferAcceptor());
+        ccb.getWriteInterface().setEmptyBufferAcceptor(new WriteEmptyBufferAcceptor());
+        for (int i = 0; i < nBuffers; ++i) {
+            ccb.getReadInterface().getEmptyBufferAcceptor().accept(ctx.allocateFrame());
+        }
+        ByteBuffer writeBuffer = ByteBuffer.allocate(INITIAL_MESSAGE_SIZE);
+        writeBuffer.putLong(jobId.getId());
+        writeBuffer.putInt(partition);
+        writeBuffer.flip();
+        if (LOGGER.isLoggable(Level.FINE)) {
+            LOGGER.fine("Sending partition request for JobId: " + jobId + " partition: " + partition + " on channel: "
+                    + ccb);
+        }
+        ccb.getWriteInterface().getFullBufferAcceptor().accept(writeBuffer);
+        ccb.getWriteInterface().getFullBufferAcceptor().close();
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+
+    }
+
+    private class ReadFullBufferAcceptor implements ICloseableBufferAcceptor {
+        @Override
+        public void accept(ByteBuffer buffer) {
+            fullQueue.add(buffer);
+            monitor.notifyDataAvailability(DatasetNetworkInputChannel.this, 1);
+        }
+
+        @Override
+        public void close() {
+            monitor.notifyEndOfStream(DatasetNetworkInputChannel.this);
+        }
+
+        @Override
+        public void error(int ecode) {
+            monitor.notifyFailure(DatasetNetworkInputChannel.this);
+        }
+    }
+
+    private class WriteEmptyBufferAcceptor implements IBufferAcceptor {
+        @Override
+        public void accept(ByteBuffer buffer) {
+            // do nothing
+        }
+    }
+}
\ No newline at end of file
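
The open() method above performs a tiny handshake: it sends one initial frame containing the job id (8 bytes) and the partition number (4 bytes), using only 12 of the INITIAL_MESSAGE_SIZE (20) bytes. A hypothetical decoder for the receiving side, mirroring the field order written above:

    import java.nio.ByteBuffer;

    public class DatasetRequestDecodeSketch {
        // Field order mirrors open() above: putLong(jobId.getId()), then putInt(partition).
        static void decode(ByteBuffer request) {
            long jobId = request.getLong();
            int partition = request.getInt();
            System.out.println("dataset request: jobId=" + jobId + " partition=" + partition);
        }
    }
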
diff --git a/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/IChannelConnectionFactory.java b/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/IChannelConnectionFactory.java
new file mode 100644
index 0000000..33179ba
--- /dev/null
+++ b/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/IChannelConnectionFactory.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.comm.channels;
+
+import java.net.SocketAddress;
+
+import edu.uci.ics.hyracks.net.exceptions.NetException;
+import edu.uci.ics.hyracks.net.protocols.muxdemux.ChannelControlBlock;
+
+public interface IChannelConnectionFactory {
+    public ChannelControlBlock connect(SocketAddress remoteAddress) throws InterruptedException, NetException;
+}
diff --git a/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/NetworkInputChannel.java b/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/NetworkInputChannel.java
new file mode 100644
index 0000000..aa37b16
--- /dev/null
+++ b/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/NetworkInputChannel.java
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.comm.channels;
+
+import java.net.SocketAddress;
+import java.nio.ByteBuffer;
+import java.util.ArrayDeque;
+import java.util.Queue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.channels.IInputChannel;
+import edu.uci.ics.hyracks.api.channels.IInputChannelMonitor;
+import edu.uci.ics.hyracks.api.context.IHyracksCommonContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.partitions.PartitionId;
+import edu.uci.ics.hyracks.net.buffers.IBufferAcceptor;
+import edu.uci.ics.hyracks.net.buffers.ICloseableBufferAcceptor;
+import edu.uci.ics.hyracks.net.protocols.muxdemux.ChannelControlBlock;
+
+public class NetworkInputChannel implements IInputChannel {
+    private static final Logger LOGGER = Logger.getLogger(NetworkInputChannel.class.getName());
+
+    static final int INITIAL_MESSAGE_SIZE = 20;
+
+    private final IChannelConnectionFactory netManager;
+
+    private final SocketAddress remoteAddress;
+
+    private final PartitionId partitionId;
+
+    private final Queue<ByteBuffer> fullQueue;
+
+    private final int nBuffers;
+
+    private ChannelControlBlock ccb;
+
+    private IInputChannelMonitor monitor;
+
+    private Object attachment;
+
+    public NetworkInputChannel(IChannelConnectionFactory netManager, SocketAddress remoteAddress,
+            PartitionId partitionId, int nBuffers) {
+        this.netManager = netManager;
+        this.remoteAddress = remoteAddress;
+        this.partitionId = partitionId;
+        fullQueue = new ArrayDeque<ByteBuffer>(nBuffers);
+        this.nBuffers = nBuffers;
+    }
+
+    @Override
+    public void registerMonitor(IInputChannelMonitor monitor) {
+        this.monitor = monitor;
+    }
+
+    @Override
+    public void setAttachment(Object attachment) {
+        this.attachment = attachment;
+    }
+
+    @Override
+    public Object getAttachment() {
+        return attachment;
+    }
+
+    @Override
+    public synchronized ByteBuffer getNextBuffer() {
+        return fullQueue.poll();
+    }
+
+    @Override
+    public void recycleBuffer(ByteBuffer buffer) {
+        buffer.clear();
+        ccb.getReadInterface().getEmptyBufferAcceptor().accept(buffer);
+    }
+
+    @Override
+    public void open(IHyracksCommonContext ctx) throws HyracksDataException {
+        try {
+            ccb = netManager.connect(remoteAddress);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+        ccb.getReadInterface().setFullBufferAcceptor(new ReadFullBufferAcceptor());
+        ccb.getWriteInterface().setEmptyBufferAcceptor(new WriteEmptyBufferAcceptor());
+        for (int i = 0; i < nBuffers; ++i) {
+            ccb.getReadInterface().getEmptyBufferAcceptor().accept(ctx.allocateFrame());
+        }
+        ByteBuffer writeBuffer = ByteBuffer.allocate(INITIAL_MESSAGE_SIZE);
+        writeBuffer.putLong(partitionId.getJobId().getId());
+        writeBuffer.putInt(partitionId.getConnectorDescriptorId().getId());
+        writeBuffer.putInt(partitionId.getSenderIndex());
+        writeBuffer.putInt(partitionId.getReceiverIndex());
+        writeBuffer.flip();
+        if (LOGGER.isLoggable(Level.FINE)) {
+            LOGGER.fine("Sending partition request: " + partitionId + " on channel: " + ccb);
+        }
+        ccb.getWriteInterface().getFullBufferAcceptor().accept(writeBuffer);
+        ccb.getWriteInterface().getFullBufferAcceptor().close();
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+
+    }
+
+    private class ReadFullBufferAcceptor implements ICloseableBufferAcceptor {
+        @Override
+        public void accept(ByteBuffer buffer) {
+            fullQueue.add(buffer);
+            monitor.notifyDataAvailability(NetworkInputChannel.this, 1);
+        }
+
+        @Override
+        public void close() {
+            monitor.notifyEndOfStream(NetworkInputChannel.this);
+        }
+
+        @Override
+        public void error(int ecode) {
+            monitor.notifyFailure(NetworkInputChannel.this);
+        }
+    }
+
+    private class WriteEmptyBufferAcceptor implements IBufferAcceptor {
+        @Override
+        public void accept(ByteBuffer buffer) {
+            // do nothing
+        }
+    }
+}
\ No newline at end of file
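
Here the initial frame carries a full PartitionId: 8 bytes of job id plus three 4-byte ints, which is exactly the 20 bytes of INITIAL_MESSAGE_SIZE. A hypothetical receiver-side decode, mirroring the writes in open():

    import java.nio.ByteBuffer;

    public class PartitionRequestDecodeSketch {
        // 8 + 4 + 4 + 4 = 20 bytes, exactly INITIAL_MESSAGE_SIZE.
        static void decode(ByteBuffer request) {
            long jobId = request.getLong();       // partitionId.getJobId().getId()
            int connectorId = request.getInt();   // partitionId.getConnectorDescriptorId().getId()
            int senderIndex = request.getInt();   // partitionId.getSenderIndex()
            int receiverIndex = request.getInt(); // partitionId.getReceiverIndex()
            System.out.println("partition request: " + jobId + ":" + connectorId + ":" + senderIndex
                    + ":" + receiverIndex);
        }
    }
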
diff --git a/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/NetworkOutputChannel.java b/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/NetworkOutputChannel.java
new file mode 100644
index 0000000..812a2de
--- /dev/null
+++ b/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/NetworkOutputChannel.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.comm.channels;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayDeque;
+import java.util.Deque;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.net.buffers.IBufferAcceptor;
+import edu.uci.ics.hyracks.net.protocols.muxdemux.ChannelControlBlock;
+
+public class NetworkOutputChannel implements IFrameWriter {
+    private final ChannelControlBlock ccb;
+
+    private final int nBuffers;
+
+    private final Deque<ByteBuffer> emptyStack;
+
+    private boolean aborted;
+
+    public NetworkOutputChannel(ChannelControlBlock ccb, int nBuffers) {
+        this.ccb = ccb;
+        this.nBuffers = nBuffers;
+        emptyStack = new ArrayDeque<ByteBuffer>(nBuffers);
+        ccb.getWriteInterface().setEmptyBufferAcceptor(new WriteEmptyBufferAcceptor());
+    }
+
+    public void setFrameSize(int frameSize) {
+        for (int i = 0; i < nBuffers; ++i) {
+            emptyStack.push(ByteBuffer.allocateDirect(frameSize));
+        }
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        ByteBuffer destBuffer = null;
+        synchronized (this) {
+            while (true) {
+                if (aborted) {
+                    throw new HyracksDataException("Connection has been aborted");
+                }
+                destBuffer = emptyStack.poll();
+                if (destBuffer != null) {
+                    break;
+                }
+                try {
+                    wait();
+                } catch (InterruptedException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+        }
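+        // Copy exactly one frame's worth of bytes; the source frame and the staging buffers share the same frame size.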
+        buffer.position(0);
+        buffer.limit(destBuffer.capacity());
+        destBuffer.clear();
+        destBuffer.put(buffer);
+        destBuffer.flip();
+        ccb.getWriteInterface().getFullBufferAcceptor().accept(destBuffer);
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        ccb.getWriteInterface().getFullBufferAcceptor().error(1);
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        ccb.getWriteInterface().getFullBufferAcceptor().close();
+    }
+
+    public void abort() {
+        ccb.getWriteInterface().getFullBufferAcceptor().error(1);
+        synchronized (NetworkOutputChannel.this) {
+            aborted = true;
+            NetworkOutputChannel.this.notifyAll();
+        }
+    }
+
+    private class WriteEmptyBufferAcceptor implements IBufferAcceptor {
+        @Override
+        public void accept(ByteBuffer buffer) {
+            synchronized (NetworkOutputChannel.this) {
+                emptyStack.push(buffer);
+                NetworkOutputChannel.this.notifyAll();
+            }
+        }
+    }
+}
\ No newline at end of file
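
A sketch of the producing side, assuming a ChannelControlBlock obtained from an accepted connection; note that setFrameSize() must be called before the first nextFrame(), since it is what populates the empty-buffer pool that nextFrame() blocks on:

    import java.nio.ByteBuffer;

    import edu.uci.ics.hyracks.comm.channels.NetworkOutputChannel;
    import edu.uci.ics.hyracks.net.protocols.muxdemux.ChannelControlBlock;

    public class OutputChannelSketch {
        static void writeOneFrame(ChannelControlBlock ccb, ByteBuffer frame) throws Exception {
            NetworkOutputChannel out = new NetworkOutputChannel(ccb, 4); // 4 staging buffers
            out.setFrameSize(frame.capacity()); // fills the empty-buffer pool that nextFrame() draws from
            out.open();
            out.nextFrame(frame); // blocks while all staging buffers are in flight
            out.close();          // closes the full-buffer acceptor, i.e. signals end of stream
        }
    }
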
diff --git a/hyracks/hyracks-control/hyracks-control-cc/pom.xml b/hyracks/hyracks-control/hyracks-control-cc/pom.xml
index c7eedb3..d644673 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/pom.xml
+++ b/hyracks/hyracks-control/hyracks-control-cc/pom.xml
@@ -15,8 +15,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java
index 5a33891..82457fe 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java
@@ -35,12 +35,17 @@
 import edu.uci.ics.hyracks.api.client.ClusterControllerInfo;
 import edu.uci.ics.hyracks.api.client.HyracksClientInterfaceFunctions;
 import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
 import edu.uci.ics.hyracks.api.context.ICCContext;
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord;
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.dataset.IDatasetDirectoryService;
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.api.job.JobStatus;
 import edu.uci.ics.hyracks.api.topology.ClusterTopology;
 import edu.uci.ics.hyracks.api.topology.TopologyDefinitionParser;
 import edu.uci.ics.hyracks.control.cc.application.CCApplicationContext;
+import edu.uci.ics.hyracks.control.cc.dataset.DatasetDirectoryService;
 import edu.uci.ics.hyracks.control.cc.job.JobRun;
 import edu.uci.ics.hyracks.control.cc.web.WebServer;
 import edu.uci.ics.hyracks.control.cc.work.ApplicationCreateWork;
@@ -48,17 +53,23 @@
 import edu.uci.ics.hyracks.control.cc.work.ApplicationMessageWork;
 import edu.uci.ics.hyracks.control.cc.work.ApplicationStartWork;
 import edu.uci.ics.hyracks.control.cc.work.ApplicationStateChangeWork;
+import edu.uci.ics.hyracks.control.cc.work.GetDatasetDirectoryServiceInfoWork;
 import edu.uci.ics.hyracks.control.cc.work.GetIpAddressNodeNameMapWork;
 import edu.uci.ics.hyracks.control.cc.work.GetJobStatusWork;
 import edu.uci.ics.hyracks.control.cc.work.GetNodeControllersInfoWork;
+import edu.uci.ics.hyracks.control.cc.work.GetResultPartitionLocationsWork;
+import edu.uci.ics.hyracks.control.cc.work.GetResultStatusWork;
 import edu.uci.ics.hyracks.control.cc.work.JobStartWork;
 import edu.uci.ics.hyracks.control.cc.work.JobletCleanupNotificationWork;
 import edu.uci.ics.hyracks.control.cc.work.NodeHeartbeatWork;
 import edu.uci.ics.hyracks.control.cc.work.RegisterNodeWork;
 import edu.uci.ics.hyracks.control.cc.work.RegisterPartitionAvailibilityWork;
 import edu.uci.ics.hyracks.control.cc.work.RegisterPartitionRequestWork;
+import edu.uci.ics.hyracks.control.cc.work.RegisterResultPartitionLocationWork;
 import edu.uci.ics.hyracks.control.cc.work.RemoveDeadNodesWork;
 import edu.uci.ics.hyracks.control.cc.work.ReportProfilesWork;
+import edu.uci.ics.hyracks.control.cc.work.ReportResultPartitionFailureWork;
+import edu.uci.ics.hyracks.control.cc.work.ReportResultPartitionWriteCompletionWork;
 import edu.uci.ics.hyracks.control.cc.work.TaskCompleteWork;
 import edu.uci.ics.hyracks.control.cc.work.TaskFailureWork;
 import edu.uci.ics.hyracks.control.cc.work.UnregisterNodeWork;
@@ -115,6 +126,8 @@
 
     private final DeadNodeSweeper sweeper;
 
+    private final IDatasetDirectoryService datasetDirectoryService;
+
     private long jobCounter;
 
     public ClusterControllerService(final CCConfig ccConfig) throws Exception {
@@ -162,6 +175,7 @@
             }
         };
         sweeper = new DeadNodeSweeper();
+        datasetDirectoryService = new DatasetDirectoryService();
         jobCounter = 0;
     }
 
@@ -264,6 +278,10 @@
         return clusterIPC;
     }
 
+    public NetworkAddress getDatasetDirectoryServiceInfo() {
+        return new NetworkAddress(ccConfig.clientNetIpAddress.getBytes(), ccConfig.clientNetPort);
+    }
+
     private class DeadNodeSweeper extends TimerTask {
         @Override
         public void run() {
@@ -271,6 +289,10 @@
         }
     }
 
+    public IDatasetDirectoryService getDatasetDirectoryService() {
+        return datasetDirectoryService;
+    }
+
     private class HyracksClientInterfaceIPCI implements IIPCI {
         @Override
         public void deliverIncomingMessage(IIPCHandle handle, long mid, long rmid, Object payload, Exception exception) {
@@ -321,6 +343,27 @@
                     return;
                 }
 
+                case GET_DATASET_DIRECTORY_SERIVICE_INFO: {
+                    workQueue.schedule(new GetDatasetDirectoryServiceInfoWork(ClusterControllerService.this,
+                            new IPCResponder<NetworkAddress>(handle, mid)));
+                    return;
+                }
+
+                case GET_DATASET_RESULT_STATUS: {
+                    HyracksClientInterfaceFunctions.GetDatasetResultStatusFunction gdrlf = (HyracksClientInterfaceFunctions.GetDatasetResultStatusFunction) fn;
+                    workQueue.schedule(new GetResultStatusWork(ClusterControllerService.this, gdrlf.getJobId(), gdrlf
+                            .getResultSetId(), new IPCResponder<Status>(handle, mid)));
+                    return;
+                }
+
+                case GET_DATASET_RESULT_LOCATIONS: {
+                    HyracksClientInterfaceFunctions.GetDatasetResultLocationsFunction gdrlf = (HyracksClientInterfaceFunctions.GetDatasetResultLocationsFunction) fn;
+                    workQueue.schedule(new GetResultPartitionLocationsWork(ClusterControllerService.this, gdrlf
+                            .getJobId(), gdrlf.getResultSetId(), gdrlf.getKnownRecords(),
+                            new IPCResponder<DatasetDirectoryRecord[]>(handle, mid)));
+                    return;
+                }
+
                 case WAIT_FOR_COMPLETION: {
                     HyracksClientInterfaceFunctions.WaitForCompletionFunction wfcf = (HyracksClientInterfaceFunctions.WaitForCompletionFunction) fn;
                     workQueue.schedule(new WaitForJobCompletionWork(ClusterControllerService.this, wfcf.getJobId(),
@@ -416,6 +459,28 @@
                     return;
                 }
 
+                case REGISTER_RESULT_PARTITION_LOCATION: {
+                    CCNCFunctions.RegisterResultPartitionLocationFunction rrplf = (CCNCFunctions.RegisterResultPartitionLocationFunction) fn;
+                    workQueue.schedule(new RegisterResultPartitionLocationWork(ClusterControllerService.this, rrplf
+                            .getJobId(), rrplf.getResultSetId(), rrplf.getOrderedResult(), rrplf.getPartition(), rrplf
+                            .getNPartitions(), rrplf.getNetworkAddress()));
+                    return;
+                }
+
+                case REPORT_RESULT_PARTITION_WRITE_COMPLETION: {
+                    CCNCFunctions.ReportResultPartitionWriteCompletionFunction rrplf = (CCNCFunctions.ReportResultPartitionWriteCompletionFunction) fn;
+                    workQueue.schedule(new ReportResultPartitionWriteCompletionWork(ClusterControllerService.this,
+                            rrplf.getJobId(), rrplf.getResultSetId(), rrplf.getPartition()));
+                    return;
+                }
+
+                case REPORT_RESULT_PARTITION_FAILURE: {
+                    CCNCFunctions.ReportResultPartitionFailureFunction rrplf = (CCNCFunctions.ReportResultPartitionFailureFunction) fn;
+                    workQueue.schedule(new ReportResultPartitionFailureWork(ClusterControllerService.this, rrplf
+                            .getJobId(), rrplf.getResultSetId(), rrplf.getPartition()));
+                    return;
+                }
+
                 case APPLICATION_STATE_CHANGE_RESPONSE: {
                     CCNCFunctions.ApplicationStateChangeResponseFunction astrf = (CCNCFunctions.ApplicationStateChangeResponseFunction) fn;
                     workQueue.schedule(new ApplicationStateChangeWork(ClusterControllerService.this, astrf));
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/NodeControllerState.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/NodeControllerState.java
index c17acd0..c96a319 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/NodeControllerState.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/NodeControllerState.java
@@ -41,6 +41,8 @@
 
     private final NetworkAddress dataPort;
 
+    private final NetworkAddress datasetPort;
+
     private final Set<JobId> activeJobIds;
 
     private final String osName;
@@ -107,6 +109,14 @@
 
     private final long[] netSignalingBytesWritten;
 
+    private final long[] datasetNetPayloadBytesRead;
+
+    private final long[] datasetNetPayloadBytesWritten;
+
+    private final long[] datasetNetSignalingBytesRead;
+
+    private final long[] datasetNetSignalingBytesWritten;
+
     private final long[] ipcMessagesSent;
 
     private final long[] ipcMessageBytesSent;
@@ -123,6 +133,7 @@
         this.nodeController = nodeController;
         ncConfig = reg.getNCConfig();
         dataPort = reg.getDataPort();
+        datasetPort = reg.getDatasetPort();
         activeJobIds = new HashSet<JobId>();
 
         osName = reg.getOSName();
@@ -164,6 +175,10 @@
         netPayloadBytesWritten = new long[RRD_SIZE];
         netSignalingBytesRead = new long[RRD_SIZE];
         netSignalingBytesWritten = new long[RRD_SIZE];
+        datasetNetPayloadBytesRead = new long[RRD_SIZE];
+        datasetNetPayloadBytesWritten = new long[RRD_SIZE];
+        datasetNetSignalingBytesRead = new long[RRD_SIZE];
+        datasetNetSignalingBytesWritten = new long[RRD_SIZE];
         ipcMessagesSent = new long[RRD_SIZE];
         ipcMessageBytesSent = new long[RRD_SIZE];
         ipcMessagesReceived = new long[RRD_SIZE];
@@ -196,6 +211,10 @@
         netPayloadBytesWritten[rrdPtr] = hbData.netPayloadBytesWritten;
         netSignalingBytesRead[rrdPtr] = hbData.netSignalingBytesRead;
         netSignalingBytesWritten[rrdPtr] = hbData.netSignalingBytesWritten;
+        datasetNetPayloadBytesRead[rrdPtr] = hbData.datasetNetPayloadBytesRead;
+        datasetNetPayloadBytesWritten[rrdPtr] = hbData.datasetNetPayloadBytesWritten;
+        datasetNetSignalingBytesRead[rrdPtr] = hbData.datasetNetSignalingBytesRead;
+        datasetNetSignalingBytesWritten[rrdPtr] = hbData.datasetNetSignalingBytesWritten;
         ipcMessagesSent[rrdPtr] = hbData.ipcMessagesSent;
         ipcMessageBytesSent[rrdPtr] = hbData.ipcMessageBytesSent;
         ipcMessagesReceived[rrdPtr] = hbData.ipcMessagesReceived;
@@ -227,6 +246,10 @@
         return dataPort;
     }
 
+    public NetworkAddress getDatasetPort() {
+        return datasetPort;
+    }
+
     public JSONObject toSummaryJSON() throws JSONException {
         JSONObject o = new JSONObject();
         o.put("node-id", ncConfig.nodeId);
@@ -271,6 +294,10 @@
         o.put("net-payload-bytes-written", netPayloadBytesWritten);
         o.put("net-signaling-bytes-read", netSignalingBytesRead);
         o.put("net-signaling-bytes-written", netSignalingBytesWritten);
+        o.put("dataset-net-payload-bytes-read", datasetNetPayloadBytesRead);
+        o.put("dataset-net-payload-bytes-written", datasetNetPayloadBytesWritten);
+        o.put("dataset-net-signaling-bytes-read", datasetNetSignalingBytesRead);
+        o.put("dataset-net-signaling-bytes-written", datasetNetSignalingBytesWritten);
         o.put("ipc-messages-sent", ipcMessagesSent);
         o.put("ipc-message-bytes-sent", ipcMessageBytesSent);
         o.put("ipc-messages-received", ipcMessagesReceived);
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/dataset/DatasetDirectoryService.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/dataset/DatasetDirectoryService.java
new file mode 100644
index 0000000..13d0c30
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/dataset/DatasetDirectoryService.java
@@ -0,0 +1,241 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.cc.dataset;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord;
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.dataset.IDatasetDirectoryService;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobId;
+
+/**
+ * TODO(madhusudancs): The potential peril of this global dataset directory service implementation is that a job's
+ * location information is never evicted from memory, so memory usage grows with the number of jobs in the system.
+ * What we should possibly do is add an API call for the client to indicate that it has completely received everything
+ * it needs for the job (after it receives all the results). Then we can just get rid of the location information for
+ * that job.
+ */
+public class DatasetDirectoryService implements IDatasetDirectoryService {
+    private final Map<JobId, Map<ResultSetId, ResultSetMetaData>> jobResultLocationsMap;
+
+    public DatasetDirectoryService() {
+        jobResultLocationsMap = new HashMap<JobId, Map<ResultSetId, ResultSetMetaData>>();
+    }
+
+    @Override
+    public synchronized void registerResultPartitionLocation(JobId jobId, ResultSetId rsId, boolean orderedResult,
+            int partition, int nPartitions, NetworkAddress networkAddress) {
+        Map<ResultSetId, ResultSetMetaData> rsMap = jobResultLocationsMap.get(jobId);
+        if (rsMap == null) {
+            rsMap = new HashMap<ResultSetId, ResultSetMetaData>();
+            jobResultLocationsMap.put(jobId, rsMap);
+        }
+
+        ResultSetMetaData resultSetMetaData = rsMap.get(rsId);
+        if (resultSetMetaData == null) {
+            resultSetMetaData = new ResultSetMetaData(orderedResult, new DatasetDirectoryRecord[nPartitions]);
+            rsMap.put(rsId, resultSetMetaData);
+        }
+
+        DatasetDirectoryRecord[] records = resultSetMetaData.getRecords();
+        if (records[partition] == null) {
+            records[partition] = new DatasetDirectoryRecord();
+        }
+        records[partition].setNetworkAddress(networkAddress);
+        records[partition].start();
+        notifyAll();
+    }
+
+    @Override
+    public synchronized void reportResultPartitionWriteCompletion(JobId jobId, ResultSetId rsId, int partition) {
+        DatasetDirectoryRecord ddr = getDatasetDirectoryRecord(jobId, rsId, partition);
+        ddr.writeEOS();
+    }
+
+    @Override
+    public synchronized void reportResultPartitionFailure(JobId jobId, ResultSetId rsId, int partition) {
+        DatasetDirectoryRecord ddr = getDatasetDirectoryRecord(jobId, rsId, partition);
+        ddr.fail();
+    }
+
+    @Override
+    public synchronized Status getResultStatus(JobId jobId, ResultSetId rsId) throws HyracksDataException {
+        Map<ResultSetId, ResultSetMetaData> rsMap;
+        while ((rsMap = jobResultLocationsMap.get(jobId)) == null) {
+            try {
+                wait();
+            } catch (InterruptedException e) {
+                throw new HyracksDataException(e);
+            }
+        }
+
+        ResultSetMetaData resultSetMetaData = rsMap.get(rsId);
+        if (resultSetMetaData == null || resultSetMetaData.getRecords() == null) {
+            throw new HyracksDataException("ResultSet locations uninitialized when it is expected to be initialized.");
+        }
+        DatasetDirectoryRecord[] records = resultSetMetaData.getRecords();
+
+        ArrayList<Status> statuses = new ArrayList<Status>(records.length);
+        for (int i = 0; i < records.length; i++) {
+            // A partition that has not registered a record yet is treated as idle to avoid an NPE here.
+            statuses.add(records[i] == null ? Status.IDLE : records[i].getStatus());
+        }
+
+        // The default status is IDLE.
+        Status status = Status.IDLE;
+        if (statuses.contains(Status.FAILED)) {
+            // If there is even one FAILED entry, we should return the FAILED status.
+            return Status.FAILED;
+        } else if (statuses.contains(Status.RUNNING)) {
+            // If there are no FAILED entries and at least one RUNNING entry, we should return the RUNNING status.
+            return Status.RUNNING;
+        } else {
+            // Only if each and every partition has reported SUCCESS do we report SUCCESS as the status.
+            int successCount = 0;
+            for (int i = 0; i < statuses.size(); i++) {
+                if (statuses.get(i) == Status.SUCCESS) {
+                    successCount++;
+                }
+            }
+            if (successCount == statuses.size()) {
+                return Status.SUCCESS;
+            }
+        }
+        return status;
+    }
+
+    @Override
+    public synchronized DatasetDirectoryRecord[] getResultPartitionLocations(JobId jobId, ResultSetId rsId,
+            DatasetDirectoryRecord[] knownRecords) throws HyracksDataException {
+        DatasetDirectoryRecord[] newRecords;
+        while ((newRecords = updatedRecords(jobId, rsId, knownRecords)) == null) {
+            try {
+                wait();
+            } catch (InterruptedException e) {
+                throw new HyracksDataException(e);
+            }
+        }
+        return newRecords;
+    }
+
+    public DatasetDirectoryRecord getDatasetDirectoryRecord(JobId jobId, ResultSetId rsId, int partition) {
+        Map<ResultSetId, ResultSetMetaData> rsMap = jobResultLocationsMap.get(jobId);
+        ResultSetMetaData resultSetMetaData = rsMap.get(rsId);
+        DatasetDirectoryRecord[] records = resultSetMetaData.getRecords();
+        return records[partition];
+    }
+
+    /**
+     * Compares the records already known by the client for the given job's result set id with the records that the
+     * dataset directory service knows of, and, if there are any newly discovered records, returns a whole array with
+     * the new records filled in.
+     * The logic works as follows. If the ordering constraint has to be enforced, the method finds the first null
+     * record among the known records, always traversing the array in first-to-last order.
+     * If the known-records array is null, or its first element is null, but the record for that partition is now
+     * known to the directory service, the method fills in that record and returns the array.
+     * However, if the first null record is not the first element of the array then, by induction, all the preceding
+     * records are already known to the client and none of the records for the partitions ahead of it are known yet.
+     * So we check whether the client has reached end of stream for the partition corresponding to the record before
+     * the first null record, i.e. the last known non-null record. If not, we just return null, because we cannot
+     * expose any new locations until the client reaches end of stream for the last known record.
+     * If the client has reached end of stream for the last known non-null record, we check whether the next record
+     * has been discovered by the dataset directory service; if so, we fill it into the records array and return the
+     * array, otherwise we return null.
+     * If ordering is not required, we are free to return any newly discovered records, so we just check whether the
+     * arrays are equal and, if they are not, return the entire updated array.
+     * 
+     * @param jobId
+     *            - Id of the job for which the directory records should be retrieved.
+     * @param rsId
+     *            - Id of the result set for which the directory records should be retrieved.
+     * @param knownRecords
+     *            - An array of directory records that the client is already aware of.
+     * @return
+     *         - null if there are no newly discovered partitions while enforcing the ordering constraint; otherwise
+     *           the records array with the newly discovered records filled in.
+     * @throws HyracksDataException
+     *             TODO(madhusudancs): Think about caching (while still remaining stateless) instead of these ugly O(n)
+     *             iterations for every check. This already looks very expensive.
+     */
+    private DatasetDirectoryRecord[] updatedRecords(JobId jobId, ResultSetId rsId, DatasetDirectoryRecord[] knownRecords)
+            throws HyracksDataException {
+        Map<ResultSetId, ResultSetMetaData> rsMap = jobResultLocationsMap.get(jobId);
+        if (rsMap == null) {
+            return null;
+        }
+
+        ResultSetMetaData resultSetMetaData = rsMap.get(rsId);
+        if (resultSetMetaData == null || resultSetMetaData.getRecords() == null) {
+            throw new HyracksDataException("ResultSet locations uninitialized when it is expected to be initialized.");
+        }
+
+        boolean ordered = resultSetMetaData.getOrderedResult();
+        DatasetDirectoryRecord[] records = resultSetMetaData.getRecords();
+        /* If ordering is required, we should expose the dataset directory records only in the order, otherwise
+         * we can simply check if there are any newly discovered records and send the whole array back if there are.
+         */
+        if (ordered) {
+            // Iterate over the known records and find the first one that is still null.
+            for (int i = 0; i < records.length; i++) {
+                if (knownRecords == null) {
+                    if (records[0] != null) {
+                        knownRecords = new DatasetDirectoryRecord[records.length];
+                        knownRecords[0] = records[0];
+                        return knownRecords;
+                    }
+                    return null;
+                }
+                if (knownRecords[i] == null) {
+                    if ((i == 0 || knownRecords[i - 1].hasReachedReadEOS()) && records[i] != null) {
+                        knownRecords[i] = records[i];
+                        return knownRecords;
+                    }
+                    return null;
+                }
+            }
+        } else {
+            if (!Arrays.equals(records, knownRecords)) {
+                return records;
+            }
+        }
+        return null;
+    }
+
+    private class ResultSetMetaData {
+        private final boolean ordered;
+
+        private final DatasetDirectoryRecord[] records;
+
+        public ResultSetMetaData(boolean ordered, DatasetDirectoryRecord[] records) {
+            this.ordered = ordered;
+            this.records = records;
+        }
+
+        public boolean getOrderedResult() {
+            return ordered;
+        }
+
+        public DatasetDirectoryRecord[] getRecords() {
+            return records;
+        }
+    }
+}
\ No newline at end of file
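
A hypothetical client polling loop against this service, illustrating the ordered-exposure contract documented above: with an ordered result each call exposes at most one new record, so the client keeps passing back the records it already knows until none are null:

    import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord;
    import edu.uci.ics.hyracks.api.dataset.IDatasetDirectoryService;
    import edu.uci.ics.hyracks.api.dataset.ResultSetId;
    import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
    import edu.uci.ics.hyracks.api.job.JobId;

    public class LocationPollingSketch {
        static void drainAllPartitions(IDatasetDirectoryService dds, JobId jobId, ResultSetId rsId)
                throws HyracksDataException {
            DatasetDirectoryRecord[] known = null;
            boolean complete = false;
            while (!complete) {
                // Blocks inside the service until at least one new record may be exposed.
                known = dds.getResultPartitionLocations(jobId, rsId, known);
                complete = true;
                for (DatasetDirectoryRecord r : known) {
                    if (r == null) {
                        complete = false;
                        break;
                    }
                }
                // A real client would now connect to the newly exposed address(es) and read those partitions.
            }
        }
    }
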
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetDatasetDirectoryServiceInfoWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetDatasetDirectoryServiceInfoWork.java
new file mode 100644
index 0000000..3ac6acc
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetDatasetDirectoryServiceInfoWork.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.cc.work;
+
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.work.IResultCallback;
+import edu.uci.ics.hyracks.control.common.work.SynchronizableWork;
+
+public class GetDatasetDirectoryServiceInfoWork extends SynchronizableWork {
+    private final ClusterControllerService ccs;
+
+    private final IResultCallback<NetworkAddress> callback;
+
+    public GetDatasetDirectoryServiceInfoWork(ClusterControllerService ccs, IResultCallback<NetworkAddress> callback) {
+        this.ccs = ccs;
+        this.callback = callback;
+    }
+
+    @Override
+    public void doRun() {
+        try {
+            NetworkAddress addr = ccs.getDatasetDirectoryServiceInfo();
+            callback.setValue(addr);
+        } catch (Exception e) {
+            callback.setException(e);
+        }
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeControllersInfoWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeControllersInfoWork.java
index 2f23a2c..a787b9f 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeControllersInfoWork.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetNodeControllersInfoWork.java
@@ -39,7 +39,8 @@
         Map<String, NodeControllerInfo> result = new LinkedHashMap<String, NodeControllerInfo>();
         Map<String, NodeControllerState> nodeMap = ccs.getNodeMap();
         for (Map.Entry<String, NodeControllerState> e : nodeMap.entrySet()) {
-            result.put(e.getKey(), new NodeControllerInfo(e.getKey(), NodeStatus.ALIVE, e.getValue().getDataPort()));
+            result.put(e.getKey(), new NodeControllerInfo(e.getKey(), NodeStatus.ALIVE, e.getValue().getDataPort(), e
+                    .getValue().getDatasetPort()));
         }
         callback.setValue(result);
     }
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetResultPartitionLocationsWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetResultPartitionLocationsWork.java
new file mode 100644
index 0000000..fd1d418
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetResultPartitionLocationsWork.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.cc.work;
+
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord;
+import edu.uci.ics.hyracks.api.dataset.IDatasetDirectoryService;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.work.IResultCallback;
+import edu.uci.ics.hyracks.control.common.work.SynchronizableWork;
+
+public class GetResultPartitionLocationsWork extends SynchronizableWork {
+    private final ClusterControllerService ccs;
+
+    private final JobId jobId;
+
+    private final ResultSetId rsId;
+
+    private final DatasetDirectoryRecord[] knownRecords;
+
+    private final IResultCallback<DatasetDirectoryRecord[]> callback;
+
+    public GetResultPartitionLocationsWork(ClusterControllerService ccs, JobId jobId, ResultSetId rsId,
+            DatasetDirectoryRecord[] knownRecords, IResultCallback<DatasetDirectoryRecord[]> callback) {
+        this.ccs = ccs;
+        this.jobId = jobId;
+        this.rsId = rsId;
+        this.knownRecords = knownRecords;
+        this.callback = callback;
+    }
+
+    @Override
+    public void doRun() {
+        final IDatasetDirectoryService dds = ccs.getDatasetDirectoryService();
+        ccs.getExecutor().execute(new Runnable() {
+            @Override
+            public void run() {
+                try {
+                    DatasetDirectoryRecord[] partitionLocations = dds.getResultPartitionLocations(jobId, rsId,
+                            knownRecords);
+                    callback.setValue(partitionLocations);
+                } catch (HyracksDataException e) {
+                    callback.setException(e);
+                }
+            }
+        });
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetResultStatusWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetResultStatusWork.java
new file mode 100644
index 0000000..d2dadf5
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/GetResultStatusWork.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.cc.work;
+
+import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.work.IResultCallback;
+import edu.uci.ics.hyracks.control.common.work.SynchronizableWork;
+
+public class GetResultStatusWork extends SynchronizableWork {
+    private final ClusterControllerService ccs;
+
+    private final JobId jobId;
+
+    private final ResultSetId rsId;
+
+    private final IResultCallback<Status> callback;
+
+    public GetResultStatusWork(ClusterControllerService ccs, JobId jobId, ResultSetId rsId,
+            IResultCallback<Status> callback) {
+        this.ccs = ccs;
+        this.jobId = jobId;
+        this.rsId = rsId;
+        this.callback = callback;
+    }
+
+    @Override
+    public void doRun() {
+        try {
+            Status status = ccs.getDatasetDirectoryService().getResultStatus(jobId, rsId);
+            callback.setValue(status);
+        } catch (HyracksDataException e) {
+            callback.setException(e);
+        }
+    }
+
+    @Override
+    public String toString() {
+        return "JobId@" + jobId + " ResultSetId@" + rsId;
+    }
+}
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobStartWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobStartWork.java
index b6a33cd..b062d33 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobStartWork.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobStartWork.java
@@ -16,6 +16,7 @@
 
 import java.util.EnumSet;
 
+import edu.uci.ics.hyracks.api.dataset.IDatasetDirectoryService;
 import edu.uci.ics.hyracks.api.exceptions.HyracksException;
 import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGenerator;
 import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterResultPartitionLocationWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterResultPartitionLocationWork.java
new file mode 100644
index 0000000..f86e924
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterResultPartitionLocationWork.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.cc.work;
+
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.work.AbstractWork;
+
+public class RegisterResultPartitionLocationWork extends AbstractWork {
+    private final ClusterControllerService ccs;
+
+    private final JobId jobId;
+
+    private final ResultSetId rsId;
+
+    private final boolean orderedResult;
+
+    private final int partition;
+
+    private final int nPartitions;
+
+    private final NetworkAddress networkAddress;
+
+    public RegisterResultPartitionLocationWork(ClusterControllerService ccs, JobId jobId, ResultSetId rsId,
+            boolean orderedResult, int partition, int nPartitions, NetworkAddress networkAddress) {
+        this.ccs = ccs;
+        this.jobId = jobId;
+        this.rsId = rsId;
+        this.orderedResult = orderedResult;
+        this.partition = partition;
+        this.nPartitions = nPartitions;
+        this.networkAddress = networkAddress;
+    }
+
+    @Override
+    public void run() {
+        ccs.getDatasetDirectoryService().registerResultPartitionLocation(jobId, rsId, orderedResult, partition,
+                nPartitions, networkAddress);
+    }
+
+    @Override
+    public String toString() {
+        return "JobId@" + jobId + " ResultSetId@" + rsId + " Partition@" + partition + " NPartitions@" + nPartitions
+                + " ResultPartitionLocation@" + networkAddress + " OrderedResult@" + orderedResult;
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ReportResultPartitionFailureWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ReportResultPartitionFailureWork.java
new file mode 100644
index 0000000..4aea41e
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ReportResultPartitionFailureWork.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.cc.work;
+
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.work.AbstractWork;
+
+public class ReportResultPartitionFailureWork extends AbstractWork {
+    private final ClusterControllerService ccs;
+
+    private final JobId jobId;
+
+    private final ResultSetId rsId;
+
+    private final int partition;
+
+    public ReportResultPartitionFailureWork(ClusterControllerService ccs, JobId jobId, ResultSetId rsId, int partition) {
+        this.ccs = ccs;
+        this.jobId = jobId;
+        this.rsId = rsId;
+        this.partition = partition;
+    }
+
+    @Override
+    public void run() {
+        ccs.getDatasetDirectoryService().reportResultPartitionFailure(jobId, rsId, partition);
+    }
+
+    @Override
+    public String toString() {
+        return "JobId@" + jobId + " ResultSetId@" + rsId + " Partition@" + partition;
+    }
+}
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ReportResultPartitionWriteCompletionWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ReportResultPartitionWriteCompletionWork.java
new file mode 100644
index 0000000..313b730
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ReportResultPartitionWriteCompletionWork.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.cc.work;
+
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.work.AbstractWork;
+
+public class ReportResultPartitionWriteCompletionWork extends AbstractWork {
+    private final ClusterControllerService ccs;
+
+    private final JobId jobId;
+
+    private final ResultSetId rsId;
+
+    private final int partition;
+
+    public ReportResultPartitionWriteCompletionWork(ClusterControllerService ccs, JobId jobId, ResultSetId rsId,
+            int partition) {
+        this.ccs = ccs;
+        this.jobId = jobId;
+        this.rsId = rsId;
+        this.partition = partition;
+    }
+
+    @Override
+    public void run() {
+        ccs.getDatasetDirectoryService().reportResultPartitionWriteCompletion(jobId, rsId, partition);
+    }
+
+    @Override
+    public String toString() {
+        return "JobId@" + jobId + " ResultSetId@" + rsId + " Partition@" + partition;
+    }
+}
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/NodeDetailsPage.js b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/NodeDetailsPage.js
index ff9d8a0..8e94269 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/NodeDetailsPage.js
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/resources/static/javascript/adminconsole/NodeDetailsPage.js
@@ -58,6 +58,10 @@
         var netPayloadBytesWritten = result['net-payload-bytes-written'];
         var netSignalingBytesRead = result['net-signaling-bytes-read'];
         var netSignalingBytesWritten = result['net-signaling-bytes-written'];
+        var datasetNetPayloadBytesRead = result['dataset-net-payload-bytes-read'];
+        var datasetNetPayloadBytesWritten = result['dataset-net-payload-bytes-written'];
+        var datasetNetSignalingBytesRead = result['dataset-net-signaling-bytes-read'];
+        var datasetNetSignalingBytesWritten = result['dataset-net-signaling-bytes-written'];
         var ipcMessagesSent = result['ipc-messages-sent'];
         var ipcMessageBytesSent = result['ipc-message-bytes-sent'];
         var ipcMessagesReceived = result['ipc-messages-received'];
@@ -117,8 +121,12 @@
             }
             if (i < sysLoad.length - 1) {
                 netPayloadReadBWArray.push([ i, computeRate(netPayloadBytesRead, rrdPtr) ]);
+                netPayloadReadBWArray.push([ i, computeRate(datasetNetPayloadBytesRead, rrdPtr) ]);
                 netPayloadWriteBWArray.push([ i, computeRate(netPayloadBytesWritten, rrdPtr) ]);
+                netPayloadWriteBWArray.push([ i, computeRate(datasetNetPayloadBytesWritten, rrdPtr) ]);
                 netSignalingReadBWArray.push([ i, computeRate(netSignalingBytesRead, rrdPtr) ]);
+                netSignalingReadBWArray.push([ i, computeRate(datasetNetSignalingBytesRead, rrdPtr) ]);
+                netSignalingWriteBWArray.push([ i, computeRate(datasetNetSignalingBytesWritten, rrdPtr) ]);
                 netSignalingWriteBWArray.push([ i, computeRate(netSignalingBytesWritten, rrdPtr) ]);
                 ipcMessageSendRateArray.push([ i, computeRate(ipcMessagesSent, rrdPtr) ]);
                 ipcMessageBytesSendRateArray.push([ i, computeRate(ipcMessageBytesSent, rrdPtr) ]);
@@ -229,4 +237,4 @@
     }
 
     fetchData();
-});
\ No newline at end of file
+});
diff --git a/hyracks/hyracks-control/hyracks-control-common/pom.xml b/hyracks/hyracks-control/hyracks-control-common/pom.xml
index 096f2e1..ce1298e 100644
--- a/hyracks/hyracks-control/hyracks-control-common/pom.xml
+++ b/hyracks/hyracks-control/hyracks-control-common/pom.xml
@@ -18,8 +18,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/IClusterController.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/IClusterController.java
index 0c5bb2f..55e4479 100644
--- a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/IClusterController.java
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/IClusterController.java
@@ -16,7 +16,9 @@
 
 import java.util.List;
 
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
 import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.control.common.application.ApplicationStatus;
 import edu.uci.ics.hyracks.control.common.controllers.NodeRegistration;
@@ -46,6 +48,13 @@
 
     public void registerPartitionRequest(PartitionRequest partitionRequest) throws Exception;
 
+    public void registerResultPartitionLocation(JobId jobId, ResultSetId rsId, boolean orderedResult, int partition,
+            int nPartitions, NetworkAddress networkAddress) throws Exception;
+
+    public void reportResultPartitionWriteCompletion(JobId jobId, ResultSetId rsId, int partition) throws Exception;
+
+    public void reportResultPartitionFailure(JobId jobId, ResultSetId rsId, int partition) throws Exception;
+
     public void notifyApplicationStateChange(String nodeId, String appName, ApplicationStatus status) throws Exception;
 
     public void sendApplicationMessageToCC(byte[] data, String appName, String nodeId) throws Exception;
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NCConfig.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NCConfig.java
index 167eb4b..5071bc9 100644
--- a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NCConfig.java
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NCConfig.java
@@ -37,6 +37,9 @@
     @Option(name = "-data-ip-address", usage = "IP Address to bind data listener", required = true)
     public String dataIPAddress;
 
+    @Option(name = "-result-ip-address", usage = "IP Address to bind dataset result distribution listener", required = true)
+    public String datasetIPAddress;
+
     @Option(name = "-iodevices", usage = "Comma separated list of IO Device mount points (default: One device in default temp folder)", required = false)
     public String ioDevices = System.getProperty("java.io.tmpdir");
 
@@ -55,6 +58,9 @@
     @Option(name = "-max-memory", usage = "Maximum memory usable at this Node Controller in bytes (default: -1 auto)")
     public int maxMemory = -1;
 
+    @Option(name = "-result-manager-memory", usage = "Memory usable for result caching at this Node Controller in bytes (default: -1 auto)")
+    public int resultManagerMemory = -1;
+
     public void toCommandLine(List<String> cList) {
         cList.add("-cc-host");
         cList.add(ccHost);
@@ -66,6 +72,8 @@
         cList.add(nodeId);
         cList.add("-data-ip-address");
         cList.add(dataIPAddress);
+        cList.add("-result-ip-address");
+        cList.add(datasetIPAddress);
         cList.add("-iodevices");
         cList.add(ioDevices);
         cList.add("-dcache-client-servers");
@@ -80,5 +88,7 @@
         cList.add(String.valueOf(nNetThreads));
         cList.add("-max-memory");
         cList.add(String.valueOf(maxMemory));
+        cList.add("-result-manager-memory");
+        cList.add(String.valueOf(resultManagerMemory));
     }
 }
\ No newline at end of file
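
A note on the two options added above: toCommandLine() emits args4j flag/value pairs, so every value must be preceded by its flag (which is why "-result-ip-address" is added before datasetIPAddress). A minimal round-trip sketch; the host, node id, and addresses are illustrative values, not defaults from this patch:

    // Fragment assumed to live in driver/test code alongside NCConfig.
    NCConfig ncConfig = new NCConfig();
    ncConfig.ccHost = "cc.example.com";
    ncConfig.nodeId = "nc1";
    ncConfig.dataIPAddress = "10.0.0.5";
    ncConfig.datasetIPAddress = "10.0.0.5";  // bound by the dataset result listener
    ncConfig.resultManagerMemory = 64 << 20; // 64 MB for result caching

    List<String> args = new ArrayList<String>();
    ncConfig.toCommandLine(args);
    // args now contains ..., "-result-ip-address", "10.0.0.5",
    //                        "-result-manager-memory", "67108864"
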
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NodeRegistration.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NodeRegistration.java
index 91cfecf..a897602 100644
--- a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NodeRegistration.java
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NodeRegistration.java
@@ -33,6 +33,8 @@
 
     private final NetworkAddress dataPort;
 
+    private final NetworkAddress datasetPort;
+
     private final String osName;
 
     private final String arch;
@@ -60,13 +62,14 @@
     private final HeartbeatSchema hbSchema;
 
     public NodeRegistration(InetSocketAddress ncAddress, String nodeId, NCConfig ncConfig, NetworkAddress dataPort,
-            String osName, String arch, String osVersion, int nProcessors, String vmName, String vmVersion,
-            String vmVendor, String classpath, String libraryPath, String bootClasspath, List<String> inputArguments,
-            Map<String, String> systemProperties, HeartbeatSchema hbSchema) {
+            NetworkAddress datasetPort, String osName, String arch, String osVersion, int nProcessors, String vmName,
+            String vmVersion, String vmVendor, String classpath, String libraryPath, String bootClasspath,
+            List<String> inputArguments, Map<String, String> systemProperties, HeartbeatSchema hbSchema) {
         this.ncAddress = ncAddress;
         this.nodeId = nodeId;
         this.ncConfig = ncConfig;
         this.dataPort = dataPort;
+        this.datasetPort = datasetPort;
         this.osName = osName;
         this.arch = arch;
         this.osVersion = osVersion;
@@ -98,6 +101,10 @@
         return dataPort;
     }
 
+    public NetworkAddress getDatasetPort() {
+        return datasetPort;
+    }
+
     public String getOSName() {
         return osName;
     }
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/heartbeat/HeartbeatData.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/heartbeat/HeartbeatData.java
index 1dba3bc..663c68a 100644
--- a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/heartbeat/HeartbeatData.java
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/heartbeat/HeartbeatData.java
@@ -37,6 +37,10 @@
     public long netPayloadBytesWritten;
     public long netSignalingBytesRead;
     public long netSignalingBytesWritten;
+    public long datasetNetPayloadBytesRead;
+    public long datasetNetPayloadBytesWritten;
+    public long datasetNetSignalingBytesRead;
+    public long datasetNetSignalingBytesWritten;
     public long ipcMessagesSent;
     public long ipcMessageBytesSent;
     public long ipcMessagesReceived;
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/CCNCFunctions.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/CCNCFunctions.java
index 557a8cb..b506b12 100644
--- a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/CCNCFunctions.java
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/CCNCFunctions.java
@@ -36,6 +36,7 @@
 import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
 import edu.uci.ics.hyracks.api.dataflow.TaskId;
 import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.job.JobFlag;
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.api.job.JobStatus;
@@ -68,6 +69,9 @@
         REPORT_PROFILE,
         REGISTER_PARTITION_PROVIDER,
         REGISTER_PARTITION_REQUEST,
+        REGISTER_RESULT_PARTITION_LOCATION,
+        REPORT_RESULT_PARTITION_WRITE_COMPLETION,
+        REPORT_RESULT_PARTITION_FAILURE,
         APPLICATION_STATE_CHANGE_RESPONSE,
 
         NODE_REGISTRATION_RESULT,
@@ -438,6 +442,127 @@
         }
     }
 
+    public static class RegisterResultPartitionLocationFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final JobId jobId;
+
+        private final ResultSetId rsId;
+
+        private final boolean orderedResult;
+
+        private final int partition;
+
+        private final int nPartitions;
+
+        private NetworkAddress networkAddress;
+
+        public RegisterResultPartitionLocationFunction(JobId jobId, ResultSetId rsId, boolean orderedResult,
+                int partition, int nPartitions, NetworkAddress networkAddress) {
+            this.jobId = jobId;
+            this.rsId = rsId;
+            this.orderedResult = orderedResult;
+            this.partition = partition;
+            this.nPartitions = nPartitions;
+            this.networkAddress = networkAddress;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.REGISTER_RESULT_PARTITION_LOCATION;
+        }
+
+        public JobId getJobId() {
+            return jobId;
+        }
+
+        public ResultSetId getResultSetId() {
+            return rsId;
+        }
+
+        public boolean getOrderedResult() {
+            return orderedResult;
+        }
+
+        public int getPartition() {
+            return partition;
+        }
+
+        public int getNPartitions() {
+            return nPartitions;
+        }
+
+        public NetworkAddress getNetworkAddress() {
+            return networkAddress;
+        }
+    }
+
+    public static class ReportResultPartitionWriteCompletionFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final JobId jobId;
+
+        private final ResultSetId rsId;
+
+        private final int partition;
+
+        public ReportResultPartitionWriteCompletionFunction(JobId jobId, ResultSetId rsId, int partition) {
+            this.jobId = jobId;
+            this.rsId = rsId;
+            this.partition = partition;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.REPORT_RESULT_PARTITION_WRITE_COMPLETION;
+        }
+
+        public JobId getJobId() {
+            return jobId;
+        }
+
+        public ResultSetId getResultSetId() {
+            return rsId;
+        }
+
+        public int getPartition() {
+            return partition;
+        }
+    }
+
+    public static class ReportResultPartitionFailureFunction extends Function {
+        private static final long serialVersionUID = 1L;
+
+        private final JobId jobId;
+
+        private final ResultSetId rsId;
+
+        private final int partition;
+
+        public ReportResultPartitionFailureFunction(JobId jobId, ResultSetId rsId, int partition) {
+            this.jobId = jobId;
+            this.rsId = rsId;
+            this.partition = partition;
+        }
+
+        @Override
+        public FunctionId getFunctionId() {
+            return FunctionId.REPORT_RESULT_PARTITION_FAILURE;
+        }
+
+        public JobId getJobId() {
+            return jobId;
+        }
+
+        public ResultSetId getResultSetId() {
+            return rsId;
+        }
+
+        public int getPartition() {
+            return partition;
+        }
+    }
+
     public static class ApplicationStateChangeResponseFunction extends Function {
         private static final long serialVersionUID = 1L;
 
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
index bbaab4e..091a5d2 100644
--- a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
@@ -16,7 +16,9 @@
 
 import java.util.List;
 
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
 import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.control.common.application.ApplicationStatus;
 import edu.uci.ics.hyracks.control.common.base.IClusterController;
@@ -95,6 +97,28 @@
     }
 
     @Override
+    public void registerResultPartitionLocation(JobId jobId, ResultSetId rsId, boolean orderedResult, int partition,
+            int nPartitions, NetworkAddress networkAddress) throws Exception {
+        CCNCFunctions.RegisterResultPartitionLocationFunction fn = new CCNCFunctions.RegisterResultPartitionLocationFunction(
+                jobId, rsId, orderedResult, partition, nPartitions, networkAddress);
+        ipcHandle.send(-1, fn, null);
+    }
+
+    @Override
+    public void reportResultPartitionWriteCompletion(JobId jobId, ResultSetId rsId, int partition) throws Exception {
+        CCNCFunctions.ReportResultPartitionWriteCompletionFunction fn = new CCNCFunctions.ReportResultPartitionWriteCompletionFunction(
+                jobId, rsId, partition);
+        ipcHandle.send(-1, fn, null);
+    }
+
+    @Override
+    public void reportResultPartitionFailure(JobId jobId, ResultSetId rsId, int partition) throws Exception {
+        CCNCFunctions.ReportResultPartitionFailureFunction fn = new CCNCFunctions.ReportResultPartitionFailureFunction(
+                jobId, rsId, partition);
+        ipcHandle.send(-1, fn, null);
+    }
+
+    @Override
     public void notifyApplicationStateChange(String nodeId, String appName, ApplicationStatus status) throws Exception {
         CCNCFunctions.ApplicationStateChangeResponseFunction fn = new CCNCFunctions.ApplicationStateChangeResponseFunction(
                 nodeId, appName, status);
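
The receiving half of these proxy calls is not part of this hunk; presumably the CC-side IPC handler demultiplexes on FunctionId and enqueues the matching work items added earlier in this change. A sketch under that assumption (fn, ccs, and workQueue are supplied by the surrounding handler; workQueue.schedule stands in for the CC's actual scheduling call):

    switch (fn.getFunctionId()) {
        case REGISTER_RESULT_PARTITION_LOCATION: {
            CCNCFunctions.RegisterResultPartitionLocationFunction rrplf =
                    (CCNCFunctions.RegisterResultPartitionLocationFunction) fn;
            workQueue.schedule(new RegisterResultPartitionLocationWork(ccs, rrplf.getJobId(),
                    rrplf.getResultSetId(), rrplf.getOrderedResult(), rrplf.getPartition(),
                    rrplf.getNPartitions(), rrplf.getNetworkAddress()));
            break;
        }
        case REPORT_RESULT_PARTITION_WRITE_COMPLETION: {
            CCNCFunctions.ReportResultPartitionWriteCompletionFunction rrpwcf =
                    (CCNCFunctions.ReportResultPartitionWriteCompletionFunction) fn;
            workQueue.schedule(new ReportResultPartitionWriteCompletionWork(ccs, rrpwcf.getJobId(),
                    rrpwcf.getResultSetId(), rrpwcf.getPartition()));
            break;
        }
        case REPORT_RESULT_PARTITION_FAILURE: {
            CCNCFunctions.ReportResultPartitionFailureFunction rrpff =
                    (CCNCFunctions.ReportResultPartitionFailureFunction) fn;
            workQueue.schedule(new ReportResultPartitionFailureWork(ccs, rrpff.getJobId(),
                    rrpff.getResultSetId(), rrpff.getPartition()));
            break;
        }
        default:
            break;
    }
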
diff --git a/hyracks/hyracks-control/hyracks-control-nc/pom.xml b/hyracks/hyracks-control/hyracks-control-nc/pom.xml
index 9ecc083..c44cec9 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/pom.xml
+++ b/hyracks/hyracks-control/hyracks-control-nc/pom.xml
@@ -15,8 +15,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
@@ -40,6 +41,11 @@
   		<artifactId>hyracks-net</artifactId>
   		<version>0.2.3-SNAPSHOT</version>
   	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-comm</artifactId>
+  		<version>0.2.3-SNAPSHOT</version>
+  	</dependency>
   </dependencies>
   <reporting>
     <plugins>
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NodeControllerService.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NodeControllerService.java
index 0195143..8a36dac 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NodeControllerService.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NodeControllerService.java
@@ -45,6 +45,7 @@
 
 import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
 import edu.uci.ics.hyracks.api.context.IHyracksRootContext;
+import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
 import edu.uci.ics.hyracks.api.io.IODeviceHandle;
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.control.common.AbstractRemoteService;
@@ -61,7 +62,9 @@
 import edu.uci.ics.hyracks.control.common.work.FutureValue;
 import edu.uci.ics.hyracks.control.common.work.WorkQueue;
 import edu.uci.ics.hyracks.control.nc.application.NCApplicationContext;
+import edu.uci.ics.hyracks.control.nc.dataset.DatasetPartitionManager;
 import edu.uci.ics.hyracks.control.nc.io.IOManager;
+import edu.uci.ics.hyracks.control.nc.net.DatasetNetworkManager;
 import edu.uci.ics.hyracks.control.nc.net.NetworkManager;
 import edu.uci.ics.hyracks.control.nc.partitions.PartitionManager;
 import edu.uci.ics.hyracks.control.nc.runtime.RootHyracksContext;
@@ -94,6 +97,10 @@
 
     private final NetworkManager netManager;
 
+    private final IDatasetPartitionManager datasetPartitionManager;
+
+    private final DatasetNetworkManager datasetNetworkManager;
+
     private final WorkQueue queue;
 
     private final Timer timer;
@@ -140,7 +147,11 @@
             throw new Exception("id not set");
         }
         partitionManager = new PartitionManager(this);
-        netManager = new NetworkManager(getIpAddress(ncConfig), partitionManager, ncConfig.nNetThreads);
+        netManager = new NetworkManager(getIpAddress(ncConfig.dataIPAddress), partitionManager, ncConfig.nNetThreads);
+
+        datasetPartitionManager = new DatasetPartitionManager(this, executor, ncConfig.resultManagerMemory);
+        datasetNetworkManager = new DatasetNetworkManager(getIpAddress(ncConfig.datasetIPAddress),
+                datasetPartitionManager, ncConfig.nNetThreads);
 
         queue = new WorkQueue();
         jobletMap = new Hashtable<JobId, Joblet>();
@@ -205,6 +216,7 @@
         LOGGER.log(Level.INFO, "Starting NodeControllerService");
         ipc.start();
         netManager.start();
+        datasetNetworkManager.start();
         IIPCHandle ccIPCHandle = ipc.getHandle(new InetSocketAddress(ncConfig.ccHost, ncConfig.ccPort));
         this.ccs = new ClusterControllerRemoteProxy(ccIPCHandle);
         HeartbeatSchema.GarbageCollectorInfo[] gcInfos = new HeartbeatSchema.GarbageCollectorInfo[gcMXBeans.size()];
@@ -213,10 +225,11 @@
         }
         HeartbeatSchema hbSchema = new HeartbeatSchema(gcInfos);
         ccs.registerNode(new NodeRegistration(ipc.getSocketAddress(), id, ncConfig, netManager.getNetworkAddress(),
-                osMXBean.getName(), osMXBean.getArch(), osMXBean.getVersion(), osMXBean.getAvailableProcessors(),
-                runtimeMXBean.getVmName(), runtimeMXBean.getVmVersion(), runtimeMXBean.getVmVendor(), runtimeMXBean
-                        .getClassPath(), runtimeMXBean.getLibraryPath(), runtimeMXBean.getBootClassPath(),
-                runtimeMXBean.getInputArguments(), runtimeMXBean.getSystemProperties(), hbSchema));
+                datasetNetworkManager.getNetworkAddress(), osMXBean.getName(), osMXBean.getArch(), osMXBean
+                        .getVersion(), osMXBean.getAvailableProcessors(), runtimeMXBean.getVmName(), runtimeMXBean
+                        .getVmVersion(), runtimeMXBean.getVmVendor(), runtimeMXBean.getClassPath(), runtimeMXBean
+                        .getLibraryPath(), runtimeMXBean.getBootClassPath(), runtimeMXBean.getInputArguments(),
+                runtimeMXBean.getSystemProperties(), hbSchema));
 
         synchronized (this) {
             while (registrationPending) {
@@ -247,8 +260,10 @@
         LOGGER.log(Level.INFO, "Stopping NodeControllerService");
         executor.shutdownNow();
         partitionManager.close();
+        datasetPartitionManager.close();
         heartbeatTask.cancel();
         netManager.stop();
+        datasetNetworkManager.stop();
         queue.stop();
         LOGGER.log(Level.INFO, "Stopped NodeControllerService");
     }
@@ -273,6 +288,10 @@
         return netManager;
     }
 
+    public DatasetNetworkManager getDatasetNetworkManager() {
+        return datasetNetworkManager;
+    }
+
     public PartitionManager getPartitionManager() {
         return partitionManager;
     }
@@ -297,8 +316,7 @@
         return queue;
     }
 
-    private static InetAddress getIpAddress(NCConfig ncConfig) throws Exception {
-        String ipaddrStr = ncConfig.dataIPAddress;
+    private static InetAddress getIpAddress(String ipaddrStr) throws Exception {
         ipaddrStr = ipaddrStr.trim();
         Pattern pattern = Pattern.compile("(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})");
         Matcher m = pattern.matcher(ipaddrStr);
@@ -355,6 +373,12 @@
             hbData.netSignalingBytesRead = netPC.getSignalingBytesRead();
             hbData.netSignalingBytesWritten = netPC.getSignalingBytesWritten();
 
+            MuxDemuxPerformanceCounters datasetNetPC = datasetNetworkManager.getPerformanceCounters();
+            hbData.datasetNetPayloadBytesRead = datasetNetPC.getPayloadBytesRead();
+            hbData.datasetNetPayloadBytesWritten = datasetNetPC.getPayloadBytesWritten();
+            hbData.datasetNetSignalingBytesRead = datasetNetPC.getSignalingBytesRead();
+            hbData.datasetNetSignalingBytesWritten = datasetNetPC.getSignalingBytesWritten();
+
             IPCPerformanceCounters ipcPC = ipc.getPerformanceCounters();
             hbData.ipcMessagesSent = ipcPC.getMessageSentCount();
             hbData.ipcMessageBytesSent = ipcPC.getMessageBytesSent();
@@ -459,6 +483,10 @@
         }
     }
 
+    public IDatasetPartitionManager getDatasetPartitionManager() {
+        return datasetPartitionManager;
+    }
+
     public void sendApplicationMessageToCC(byte[] data, String appName, String nodeId) throws Exception {
         ccs.sendApplicationMessageToCC(data, appName, nodeId);
     }
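
The getIpAddress refactor above lets NetworkManager and DatasetNetworkManager share one dotted-quad parser. The hunk cuts off before the method body completes; a self-contained sketch consistent with the visible regex (the continuation is an assumption, not a quote of the original):

    import java.net.InetAddress;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    static InetAddress parseIpv4(String ipaddrStr) throws Exception {
        ipaddrStr = ipaddrStr.trim();
        Pattern pattern = Pattern.compile("(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})");
        Matcher m = pattern.matcher(ipaddrStr);
        if (!m.matches()) {
            throw new Exception("Invalid IP Address: " + ipaddrStr);
        }
        byte[] ipBytes = new byte[4];
        for (int i = 0; i < 4; i++) {
            // Each captured group is one decimal octet of the IPv4 address.
            ipBytes[i] = (byte) Integer.parseInt(m.group(i + 1));
        }
        return InetAddress.getByAddress(ipBytes);
    }
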
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Task.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Task.java
index eba3ec9..5a3e9dd 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Task.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Task.java
@@ -34,6 +34,7 @@
 import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
 import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
 import edu.uci.ics.hyracks.api.dataflow.state.IStateObject;
+import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.api.exceptions.HyracksException;
 import edu.uci.ics.hyracks.api.io.FileReference;
@@ -348,6 +349,11 @@
     }
 
     @Override
+    public IDatasetPartitionManager getDatasetPartitionManager() {
+        return ncs.getDatasetPartitionManager();
+    }
+
+    @Override
     public void sendApplicationMessageToCC(byte[] message, String nodeId) throws Exception {
         this.ncs.sendApplicationMessageToCC(message, this.getJobletContext().getApplicationContext()
                 .getApplicationName(), nodeId);
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetMemoryManager.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetMemoryManager.java
new file mode 100644
index 0000000..cecd677
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetMemoryManager.java
@@ -0,0 +1,237 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.nc.dataset;
+
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+
+import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionWriter;
+import edu.uci.ics.hyracks.api.dataset.Page;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.partitions.ResultSetPartitionId;
+
+public class DatasetMemoryManager {
+    private final Set<Page> availPages;
+
+    private final LeastRecentlyUsedList leastRecentlyUsedList;
+
+    private final Map<ResultSetPartitionId, PartitionNode> resultPartitionNodesMap;
+
+    private final static int FRAME_SIZE = 32768;
+
+    public DatasetMemoryManager(int availableMemory) {
+        availPages = new HashSet<Page>();
+
+        // Keep at least one page for temporarily storing the results.
+        if (availableMemory <= 0)
+            availableMemory = FRAME_SIZE;
+
+        while (availableMemory >= FRAME_SIZE) {
+            /* TODO(madhusudancs): Should we have some way of accounting for this memory usage by using Hyracks'
+             * allocateFrame() instead of a direct ByteBuffer.allocate()?
+             */
+            availPages.add(new Page(ByteBuffer.allocate(FRAME_SIZE)));
+            availableMemory -= FRAME_SIZE;
+        }
+
+        leastRecentlyUsedList = new LeastRecentlyUsedList();
+        resultPartitionNodesMap = new HashMap<ResultSetPartitionId, PartitionNode>();
+    }
+
+    public Page requestPage(ResultSetPartitionId resultSetPartitionId, IDatasetPartitionWriter dpw)
+            throws OutOfMemoryError, HyracksDataException {
+        Page page;
+        if (availPages.isEmpty()) {
+            page = evictPage();
+        } else {
+            page = getAvailablePage();
+        }
+
+        page.clear();
+
+        /*
+         * It is important to update the reference only after obtaining the page. If the memory manager owns a
+         * single page and updateReference ran first, this partition's node would sit at the front of the LRU list
+         * while still holding no pages, making it the immediate eviction victim. So we first obtain the page, then
+         * call updateReference (which creates a new node in the LRU list), and finally add the page to that node.
+         */
+        PartitionNode pn = updateReference(resultSetPartitionId, dpw);
+        pn.add(page);
+        return page;
+    }
+
+    public void pageReferenced(ResultSetPartitionId resultSetPartitionId) {
+        // When a page is referenced, the dataset partition writer should already be known, so we pass null.
+        updateReference(resultSetPartitionId, null);
+    }
+
+    public int getPageSize() {
+        return FRAME_SIZE;
+    }
+
+    protected void insertPartitionNode(ResultSetPartitionId resultSetPartitionId, PartitionNode pn) {
+        leastRecentlyUsedList.add(pn);
+        resultPartitionNodesMap.put(resultSetPartitionId, pn);
+    }
+
+    protected synchronized PartitionNode updateReference(ResultSetPartitionId resultSetPartitionId,
+            IDatasetPartitionWriter dpw) {
+        PartitionNode pn = null;
+
+        if (!resultPartitionNodesMap.containsKey(resultSetPartitionId)) {
+            if (dpw != null) {
+                pn = new PartitionNode(resultSetPartitionId, dpw);
+                insertPartitionNode(resultSetPartitionId, pn);
+            }
+            return pn;
+        }
+        pn = resultPartitionNodesMap.get(resultSetPartitionId);
+        leastRecentlyUsedList.remove(pn);
+        insertPartitionNode(resultSetPartitionId, pn);
+
+        return pn;
+    }
+
+    protected synchronized Page evictPage() throws HyracksDataException {
+        PartitionNode pn = leastRecentlyUsedList.getFirst();
+        IDatasetPartitionWriter dpw = pn.getDatasetPartitionWriter();
+        Page page = dpw.returnPage();
+
+        /* If the partition holding the pages breaks the contract by not returning a page, or it has no page at all,
+         * reclaim all the pages allocated to it and add them to the available pages set.
+         */
+        if (page == null) {
+            availPages.addAll(pn);
+            pn.clear();
+            resultPartitionNodesMap.remove(pn.getResultSetPartitionId());
+            leastRecentlyUsedList.remove(pn);
+
+            /* This relies on the assumption that a writer which returns a null page misreported the number of pages
+             * it holds. In that case we reclaim every page allocated to it, so all of them go back to the available
+             * set and at least one page is left to hand back to the caller.
+             */
+            page = getAvailablePage();
+        } else {
+            pn.remove(page);
+
+            // If the partition no longer holds any pages, remove it from the linked list and the hash map.
+            if (pn.isEmpty()) {
+                resultPartitionNodesMap.remove(pn.getResultSetPartitionId());
+                leastRecentlyUsedList.remove(pn);
+            }
+        }
+
+        return page;
+    }
+
+    protected synchronized Page getAvailablePage() {
+        Iterator<Page> iter = availPages.iterator();
+        Page page = iter.next();
+        iter.remove();
+        return page;
+    }
+
+    private class LeastRecentlyUsedList {
+        private PartitionNode head;
+
+        private PartitionNode tail;
+
+        public LeastRecentlyUsedList() {
+            head = null;
+            tail = null;
+        }
+
+        public void add(PartitionNode node) {
+            if (head == null) {
+                head = tail = node;
+                return;
+            }
+            tail.setNext(node);
+            node.setPrev(tail);
+            tail = node;
+        }
+
+        public void remove(PartitionNode node) {
+            if ((node == head) && (node == tail)) {
+                head = tail = null;
+                return;
+            } else if (node == head) {
+                head = head.getNext();
+                head.setPrev(null);
+                return;
+            } else if (node == tail) {
+                tail = tail.getPrev();
+                tail.setNext(null);
+                return;
+            } else {
+                PartitionNode prev = node.getPrev();
+                PartitionNode next = node.getNext();
+                prev.setNext(next);
+                next.setPrev(prev);
+            }
+        }
+
+        public PartitionNode getFirst() {
+            return head;
+        }
+    }
+
+    private class PartitionNode extends HashSet<Page> {
+        private static final long serialVersionUID = 1L;
+
+        private final ResultSetPartitionId resultSetPartitionId;
+
+        private final IDatasetPartitionWriter datasetPartitionWriter;
+
+        private PartitionNode prev;
+
+        private PartitionNode next;
+
+        public PartitionNode(ResultSetPartitionId resultSetPartitionId, IDatasetPartitionWriter datasetPartitionWriter) {
+            this.resultSetPartitionId = resultSetPartitionId;
+            this.datasetPartitionWriter = datasetPartitionWriter;
+            prev = null;
+            next = null;
+        }
+
+        public ResultSetPartitionId getResultSetPartitionId() {
+            return resultSetPartitionId;
+        }
+
+        public IDatasetPartitionWriter getDatasetPartitionWriter() {
+            return datasetPartitionWriter;
+        }
+
+        public void setPrev(PartitionNode node) {
+            prev = node;
+        }
+
+        public PartitionNode getPrev() {
+            return prev;
+        }
+
+        public void setNext(PartitionNode node) {
+            next = node;
+        }
+
+        public PartitionNode getNext() {
+            return next;
+        }
+    }
+}
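
The class above recycles a fixed pool of 32 KB pages across result partitions with LRU eviction. A usage trace under its own assumptions; partitionA/partitionB (ResultSetPartitionId) and writerA/writerB (IDatasetPartitionWriter) are hypothetical stand-ins:

    // A pool of exactly two pages.
    DatasetMemoryManager mgr = new DatasetMemoryManager(2 * 32768);

    Page p1 = mgr.requestPage(partitionA, writerA); // partitionA enters the LRU list
    Page p2 = mgr.requestPage(partitionB, writerB); // pool is now exhausted

    // No free pages remain, so the least recently used holder (partitionA) is
    // asked, via writerA.returnPage(), to persist one page and hand it back.
    Page p3 = mgr.requestPage(partitionB, writerB);
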
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionManager.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionManager.java
new file mode 100644
index 0000000..1cad54b
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionManager.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.nc.dataset;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.Executor;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
+import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionReader;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.io.IWorkspaceFileFactory;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+import edu.uci.ics.hyracks.control.nc.io.IOManager;
+import edu.uci.ics.hyracks.control.nc.io.WorkspaceFileFactory;
+import edu.uci.ics.hyracks.control.nc.resources.DefaultDeallocatableRegistry;
+
+public class DatasetPartitionManager implements IDatasetPartitionManager {
+    private final NodeControllerService ncs;
+
+    private final Executor executor;
+
+    private final Map<JobId, ResultState[]> partitionResultStateMap;
+
+    private final DefaultDeallocatableRegistry deallocatableRegistry;
+
+    private final IWorkspaceFileFactory fileFactory;
+
+    private final DatasetMemoryManager datasetMemoryManager;
+
+    public DatasetPartitionManager(NodeControllerService ncs, Executor executor, int availableMemory) {
+        this.ncs = ncs;
+        this.executor = executor;
+        partitionResultStateMap = new HashMap<JobId, ResultState[]>();
+        deallocatableRegistry = new DefaultDeallocatableRegistry();
+        fileFactory = new WorkspaceFileFactory(deallocatableRegistry, (IOManager) ncs.getRootContext().getIOManager());
+        datasetMemoryManager = new DatasetMemoryManager(availableMemory);
+    }
+
+    @Override
+    public IFrameWriter createDatasetPartitionWriter(IHyracksTaskContext ctx, ResultSetId rsId, boolean orderedResult,
+            int partition, int nPartitions) throws HyracksException {
+        DatasetPartitionWriter dpw = null;
+        JobId jobId = ctx.getJobletContext().getJobId();
+        try {
+            ncs.getClusterController().registerResultPartitionLocation(jobId, rsId, orderedResult, partition,
+                    nPartitions, ncs.getDatasetNetworkManager().getNetworkAddress());
+            dpw = new DatasetPartitionWriter(ctx, this, jobId, rsId, partition, datasetMemoryManager);
+
+            ResultState[] resultStates = partitionResultStateMap.get(jobId);
+            if (resultStates == null) {
+                resultStates = new ResultState[nPartitions];
+                partitionResultStateMap.put(jobId, resultStates);
+            }
+            resultStates[partition] = dpw.getResultState();
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+
+        return dpw;
+    }
+
+    @Override
+    public void reportPartitionWriteCompletion(JobId jobId, ResultSetId rsId, int partition) throws HyracksException {
+        try {
+            ncs.getClusterController().reportResultPartitionWriteCompletion(jobId, rsId, partition);
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+    }
+
+    @Override
+    public void reportPartitionFailure(JobId jobId, ResultSetId rsId, int partition) throws HyracksException {
+        try {
+            ncs.getClusterController().reportResultPartitionFailure(jobId, rsId, partition);
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+    }
+
+    @Override
+    public void initializeDatasetPartitionReader(JobId jobId, int partition, IFrameWriter writer)
+            throws HyracksException {
+        ResultState[] resultStates = partitionResultStateMap.get(jobId);
+
+        if (resultStates == null) {
+            throw new HyracksException("Unknown JobId " + jobId);
+        }
+
+        ResultState resultState = resultStates[partition];
+        if (resultState == null) {
+            throw new HyracksException("No DatasetPartitionWriter for partition " + partition);
+        }
+
+        IDatasetPartitionReader dpr = new DatasetPartitionReader(datasetMemoryManager, executor, resultState);
+        dpr.writeTo(writer);
+    }
+
+    @Override
+    public IWorkspaceFileFactory getFileFactory() {
+        return fileFactory;
+    }
+
+    @Override
+    public void close() {
+        deallocatableRegistry.close();
+    }
+}
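
From an operator's perspective the manager hands back a plain IFrameWriter: location registration with the CC happens inside createDatasetPartitionWriter, and completion or failure is reported from close()/fail(). A hedged usage sketch; ctx, rsId, partition, nPartitions, and frame stand in for real task state:

    IDatasetPartitionManager dpm = ctx.getDatasetPartitionManager();
    IFrameWriter writer = dpm.createDatasetPartitionWriter(ctx, rsId, true /* ordered */,
            partition, nPartitions);
    writer.open();               // opens the backing result_<partition> file
    try {
        writer.nextFrame(frame); // buffered in memory pages, spilled on eviction
    } catch (Exception e) {
        writer.fail();           // reports the failure back to the CC
        throw e;
    } finally {
        writer.close();          // sets EOS and reports write completion to the CC
    }
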
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionReader.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionReader.java
new file mode 100644
index 0000000..296c502
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionReader.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.nc.dataset;
+
+import java.nio.ByteBuffer;
+import java.util.concurrent.Executor;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionReader;
+import edu.uci.ics.hyracks.api.dataset.Page;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.IFileHandle;
+import edu.uci.ics.hyracks.api.io.IIOManager;
+import edu.uci.ics.hyracks.comm.channels.NetworkOutputChannel;
+
+public class DatasetPartitionReader implements IDatasetPartitionReader {
+    private static final Logger LOGGER = Logger.getLogger(DatasetPartitionReader.class.getName());
+
+    private final DatasetMemoryManager datasetMemoryManager;
+
+    private final Executor executor;
+
+    private final ResultState resultState;
+
+    private IFileHandle fileHandle;
+
+    public DatasetPartitionReader(DatasetMemoryManager datasetMemoryManager, Executor executor, ResultState resultState) {
+        this.datasetMemoryManager = datasetMemoryManager;
+        this.executor = executor;
+        this.resultState = resultState;
+    }
+
+    private long read(long offset, ByteBuffer buffer) throws HyracksDataException {
+        long readSize = 0;
+        synchronized (resultState) {
+            while (offset >= resultState.getSize() && !resultState.getEOS()) {
+                try {
+                    resultState.wait();
+                } catch (InterruptedException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+        }
+
+        if (offset >= resultState.getSize() && resultState.getEOS()) {
+            return readSize;
+        }
+
+        if (offset < resultState.getPersistentSize()) {
+            readSize = resultState.getIOManager().syncRead(fileHandle, offset, buffer);
+        }
+
+        if (readSize < buffer.capacity()) {
+            long localPageOffset = offset - resultState.getPersistentSize();
+            int localPageIndex = (int) (localPageOffset / datasetMemoryManager.getPageSize());
+            int pageOffset = (int) (localPageOffset % datasetMemoryManager.getPageSize());
+            Page page = resultState.getPage(localPageIndex);
+            readSize += buffer.remaining();
+            buffer.put(page.getBuffer().array(), pageOffset, buffer.remaining());
+        }
+
+        datasetMemoryManager.pageReferenced(resultState.getResultSetPartitionId());
+        return readSize;
+    }
+
+    @Override
+    public void writeTo(final IFrameWriter writer) {
+        executor.execute(new Runnable() {
+            @Override
+            public void run() {
+                NetworkOutputChannel channel = (NetworkOutputChannel) writer;
+                channel.setFrameSize(resultState.getFrameSize());
+                try {
+                    fileHandle = resultState.getIOManager().open(resultState.getValidFileReference(),
+                            IIOManager.FileReadWriteMode.READ_ONLY, IIOManager.FileSyncMode.METADATA_ASYNC_DATA_ASYNC);
+                    channel.open();
+                    try {
+                        long offset = 0;
+                        ByteBuffer buffer = ByteBuffer.allocate(resultState.getFrameSize());
+                        while (true) {
+                            buffer.clear();
+                            long size = read(offset, buffer);
+                            if (size <= 0) {
+                                break;
+                            } else if (size < buffer.limit()) {
+                                throw new HyracksDataException("Premature end of file - readSize: " + size
+                                        + " buffer limit: " + buffer.limit());
+                            }
+                            offset += size;
+                            buffer.flip();
+                            channel.nextFrame(buffer);
+                        }
+                    } finally {
+                        channel.close();
+                        resultState.getIOManager().close(fileHandle);
+                    }
+                } catch (InterruptedException e) {
+                    throw new RuntimeException(e);
+                } catch (HyracksDataException e) {
+                    throw new RuntimeException(e);
+                }
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("result reading successful (" + resultState.getResultSetPartitionId() + ")");
+                }
+            }
+        });
+    }
+}
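
read() above serves each request from two regions: the prefix already spilled to the result file (offset < persistentSize) and the in-memory suffix held in pages. A worked example of the offset arithmetic, assuming the 32 KB page size from DatasetMemoryManager and three pages (96 KB) already spilled:

    long persistentSize = 98304; // 3 * 32768 bytes already in the result file
    int pageSize = 32768;        // DatasetMemoryManager.getPageSize()
    long offset = 130000;        // requested position, past the persistent prefix

    long localPageOffset = offset - persistentSize;          // 31696
    int localPageIndex = (int) (localPageOffset / pageSize); // 0: first in-memory page
    int pageOffset = (int) (localPageOffset % pageSize);     // 31696 bytes into it
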
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionWriter.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionWriter.java
new file mode 100644
index 0000000..f6ae540
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionWriter.java
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.nc.dataset;
+
+import java.nio.ByteBuffer;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
+import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionWriter;
+import edu.uci.ics.hyracks.api.dataset.Page;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.io.IFileHandle;
+import edu.uci.ics.hyracks.api.io.IIOManager;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.partitions.ResultSetPartitionId;
+
+public class DatasetPartitionWriter implements IDatasetPartitionWriter {
+    private static final Logger LOGGER = Logger.getLogger(DatasetPartitionWriter.class.getName());
+
+    private static final String FILE_PREFIX = "result_";
+
+    private final IDatasetPartitionManager manager;
+
+    private final JobId jobId;
+
+    private final ResultSetId resultSetId;
+
+    private final int partition;
+
+    private final DatasetMemoryManager datasetMemoryManager;
+
+    private final ResultSetPartitionId resultSetPartitionId;
+
+    private final ResultState resultState;
+
+    private IFileHandle fileHandle;
+
+    public DatasetPartitionWriter(IHyracksTaskContext ctx, IDatasetPartitionManager manager, JobId jobId,
+            ResultSetId rsId, int partition, DatasetMemoryManager datasetMemoryManager) {
+        this.manager = manager;
+        this.jobId = jobId;
+        this.resultSetId = rsId;
+        this.partition = partition;
+        this.datasetMemoryManager = datasetMemoryManager;
+
+        resultSetPartitionId = new ResultSetPartitionId(jobId, rsId, partition);
+        resultState = new ResultState(resultSetPartitionId, ctx.getIOManager(), ctx.getFrameSize());
+    }
+
+    public ResultState getResultState() {
+        return resultState;
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("open(" + partition + ")");
+        }
+        String fName = FILE_PREFIX + String.valueOf(partition);
+        FileReference fRef = manager.getFileFactory().createUnmanagedWorkspaceFile(fName);
+        fileHandle = resultState.getIOManager().open(fRef, IIOManager.FileReadWriteMode.READ_WRITE,
+                IIOManager.FileSyncMode.METADATA_ASYNC_DATA_ASYNC);
+        resultState.init(fRef);
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        int srcOffset = 0;
+        Page destPage = resultState.getLastPage();
+
+        while (srcOffset < buffer.limit()) {
+            if ((destPage == null) || (destPage.getBuffer().remaining() <= 0)) {
+                destPage = datasetMemoryManager.requestPage(resultSetPartitionId, this);
+                resultState.addPage(destPage);
+            }
+            int srcLength = Math.min(buffer.limit() - srcOffset, destPage.getBuffer().remaining());
+            destPage.getBuffer().put(buffer.array(), srcOffset, srcLength);
+            srcOffset += srcLength;
+            resultState.incrementSize(srcLength);
+        }
+
+        synchronized (resultState) {
+            resultState.notifyAll();
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        try {
+            manager.reportPartitionFailure(jobId, resultSetId, partition);
+        } catch (HyracksException e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("close(" + partition + ")");
+        }
+
+        try {
+            synchronized (resultState) {
+                resultState.setEOS(true);
+                resultState.notifyAll();
+            }
+            manager.reportPartitionWriteCompletion(jobId, resultSetId, partition);
+        } catch (HyracksException e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public Page returnPage() throws HyracksDataException {
+        Page page = resultState.removePage(0);
+
+        IIOManager ioManager = resultState.getIOManager();
+
+        // If there are no pages left to give back, close the write channel (nothing more will be written) and return null.
+        if (page == null) {
+            ioManager.close(fileHandle);
+            return null;
+        }
+
+        page.getBuffer().flip();
+
+        long delta = ioManager.syncWrite(fileHandle, resultState.getPersistentSize(), page.getBuffer());
+        resultState.incrementPersistentSize(delta);
+        return page;
+    }
+}
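
returnPage() above is the eviction half of the spill protocol: the oldest in-memory page (index 0) is always the one persisted, so the on-disk prefix stays contiguous and DatasetPartitionReader's "offset - persistentSize" arithmetic stays valid. The same four steps, annotated (an excerpt with added comments, not new behavior):

    Page page = resultState.removePage(0);       // oldest page = lowest logical offset
    page.getBuffer().flip();                     // switch the buffer from fill to drain
    long delta = ioManager.syncWrite(fileHandle, // append at the current persistent tail
            resultState.getPersistentSize(), page.getBuffer());
    resultState.incrementPersistentSize(delta);  // the in-memory region now starts later
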
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/ResultState.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/ResultState.java
new file mode 100644
index 0000000..3db3fd9
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/ResultState.java
@@ -0,0 +1,173 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.nc.dataset;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import edu.uci.ics.hyracks.api.dataflow.state.IStateObject;
+import edu.uci.ics.hyracks.api.dataset.Page;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.io.IIOManager;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.partitions.ResultSetPartitionId;
+
+public class ResultState implements IStateObject {
+    private final ResultSetPartitionId resultSetPartitionId;
+
+    private final int frameSize;
+
+    private final IIOManager ioManager;
+
+    private final AtomicBoolean eos;
+
+    private final AtomicBoolean readEOS;
+
+    private final List<Page> localPageList;
+
+    private FileReference fileRef;
+
+    private long size;
+
+    private long persistentSize;
+
+    ResultState(ResultSetPartitionId resultSetPartitionId, IIOManager ioManager, int frameSize) {
+        this.resultSetPartitionId = resultSetPartitionId;
+        this.ioManager = ioManager;
+        this.frameSize = frameSize;
+        eos = new AtomicBoolean(false);
+        readEOS = new AtomicBoolean(false);
+        localPageList = new ArrayList<Page>();
+    }
+
+    public synchronized void init(FileReference fileRef) {
+        this.fileRef = fileRef;
+
+        size = 0;
+        persistentSize = 0;
+        notifyAll();
+    }
+
+    public ResultSetPartitionId getResultSetPartitionId() {
+        return resultSetPartitionId;
+    }
+
+    public int getFrameSize() {
+        return frameSize;
+    }
+
+    public IIOManager getIOManager() {
+        return ioManager;
+    }
+
+    public synchronized void incrementSize(long delta) {
+        size += delta;
+    }
+
+    public synchronized long getSize() {
+        return size;
+    }
+
+    public synchronized void incrementPersistentSize(long delta) {
+        persistentSize += delta;
+    }
+
+    public synchronized long getPersistentSize() {
+        return persistentSize;
+    }
+
+    public void setEOS(boolean eos) {
+        this.eos.set(eos);
+    }
+
+    public boolean getEOS() {
+        return eos.get();
+    }
+
+    public boolean getReadEOS() {
+        return readEOS.get();
+    }
+
+    public synchronized void addPage(Page page) {
+        localPageList.add(page);
+    }
+
+    public synchronized Page removePage(int index) {
+        Page page = null;
+        if (!localPageList.isEmpty()) {
+            page = localPageList.remove(index);
+        }
+        return page;
+    }
+
+    public synchronized Page getPage(int index) {
+        Page page = null;
+        if (!localPageList.isEmpty()) {
+            page = localPageList.get(index);
+        }
+        return page;
+    }
+
+    public synchronized Page getLastPage() {
+        Page page = null;
+        if (!localPageList.isEmpty()) {
+            page = localPageList.get(localPageList.size() - 1);
+        }
+        return page;
+    }
+
+    public synchronized Page getFirstPage() {
+        Page page = null;
+        if (!localPageList.isEmpty()) {
+            page = localPageList.get(0);
+        }
+        return page;
+    }
+
+    public synchronized FileReference getValidFileReference() throws InterruptedException {
+        while (fileRef == null) {
+            wait();
+        }
+        return fileRef;
+    }
+
+    @Override
+    public JobId getJobId() {
+        return resultSetPartitionId.getJobId();
+    }
+
+    @Override
+    public Object getId() {
+        return resultSetPartitionId;
+    }
+
+    @Override
+    public long getMemoryOccupancy() {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void toBytes(DataOutput out) throws IOException {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void fromBytes(DataInput in) throws IOException {
+        throw new UnsupportedOperationException();
+    }
+}
\ No newline at end of file
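
getValidFileReference() above is the reader half of a guarded wait: it blocks on the ResultState monitor until init() publishes the file reference and wakes all waiters. A compact illustration of that handoff discipline (a sketch of the pattern, not the Hyracks class):

public class GuardedHandoffSketch {
    private Object value; // stands in for ResultState.fileRef

    public synchronized void init(Object v) {
        this.value = v;
        notifyAll(); // wake any reader blocked in getValid()
    }

    public synchronized Object getValid() throws InterruptedException {
        while (value == null) { // guard re-checked after every wakeup
            wait();
        }
        return value;
    }

    public static void main(String[] args) throws Exception {
        final GuardedHandoffSketch state = new GuardedHandoffSketch();
        Thread reader = new Thread(new Runnable() {
            public void run() {
                try {
                    System.out.println("got " + state.getValid());
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        });
        reader.start();
        Thread.sleep(100);
        state.init("file-ref");
        reader.join();
    }
}
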
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/DatasetNetworkManager.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/DatasetNetworkManager.java
new file mode 100644
index 0000000..5b8b333
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/DatasetNetworkManager.java
@@ -0,0 +1,127 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.nc.net;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+import java.nio.ByteBuffer;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.comm.channels.IChannelConnectionFactory;
+import edu.uci.ics.hyracks.comm.channels.NetworkOutputChannel;
+import edu.uci.ics.hyracks.net.buffers.ICloseableBufferAcceptor;
+import edu.uci.ics.hyracks.net.exceptions.NetException;
+import edu.uci.ics.hyracks.net.protocols.muxdemux.ChannelControlBlock;
+import edu.uci.ics.hyracks.net.protocols.muxdemux.IChannelOpenListener;
+import edu.uci.ics.hyracks.net.protocols.muxdemux.MultiplexedConnection;
+import edu.uci.ics.hyracks.net.protocols.muxdemux.MuxDemux;
+import edu.uci.ics.hyracks.net.protocols.muxdemux.MuxDemuxPerformanceCounters;
+
+public class DatasetNetworkManager implements IChannelConnectionFactory {
+    private static final Logger LOGGER = Logger.getLogger(DatasetNetworkManager.class.getName());
+
+    private static final int MAX_CONNECTION_ATTEMPTS = 5;
+
+    static final int INITIAL_MESSAGE_SIZE = 20;
+
+    private final IDatasetPartitionManager partitionManager;
+
+    private final MuxDemux md;
+
+    private NetworkAddress networkAddress;
+
+    public DatasetNetworkManager(InetAddress inetAddress, IDatasetPartitionManager partitionManager, int nThreads)
+            throws IOException {
+        this.partitionManager = partitionManager;
+        md = new MuxDemux(new InetSocketAddress(inetAddress, 0), new ChannelOpenListener(), nThreads,
+                MAX_CONNECTION_ATTEMPTS);
+    }
+
+    public void start() throws IOException {
+        md.start();
+        InetSocketAddress sockAddr = md.getLocalAddress();
+        networkAddress = new NetworkAddress(sockAddr.getAddress().getAddress(), sockAddr.getPort());
+    }
+
+    public NetworkAddress getNetworkAddress() {
+        return networkAddress;
+    }
+
+    public void stop() {
+
+    }
+
+    public ChannelControlBlock connect(SocketAddress remoteAddress) throws InterruptedException, NetException {
+        MultiplexedConnection mConn = md.connect((InetSocketAddress) remoteAddress);
+        return mConn.openChannel();
+    }
+
+    private class ChannelOpenListener implements IChannelOpenListener {
+        @Override
+        public void channelOpened(ChannelControlBlock channel) {
+            channel.getReadInterface().setFullBufferAcceptor(new InitialBufferAcceptor(channel));
+            channel.getReadInterface().getEmptyBufferAcceptor().accept(ByteBuffer.allocate(INITIAL_MESSAGE_SIZE));
+        }
+    }
+
+    private class InitialBufferAcceptor implements ICloseableBufferAcceptor {
+        private final ChannelControlBlock ccb;
+
+        private NetworkOutputChannel noc;
+
+        public InitialBufferAcceptor(ChannelControlBlock ccb) {
+            this.ccb = ccb;
+        }
+
+        @Override
+        public void accept(ByteBuffer buffer) {
+            JobId jobId = new JobId(buffer.getLong());
+            int partition = buffer.getInt();
+            if (LOGGER.isLoggable(Level.FINE)) {
+                LOGGER.fine("Received initial dataset partition read request for JobId: " + jobId + " partition: "
+                        + partition + " on channel: " + ccb);
+            }
+            noc = new NetworkOutputChannel(ccb, 1);
+            try {
+                partitionManager.initializeDatasetPartitionReader(jobId, partition, noc);
+            } catch (HyracksException e) {
+                noc.abort();
+            }
+        }
+
+        @Override
+        public void close() {
+
+        }
+
+        @Override
+        public void error(int ecode) {
+            if (noc != null) {
+                noc.abort();
+            }
+        }
+    }
+
+    public MuxDemuxPerformanceCounters getPerformanceCounters() {
+        return md.getPerformanceCounters();
+    }
+}
\ No newline at end of file
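
The InitialBufferAcceptor above decodes a fixed-layout request, a big-endian 8-byte job id followed by a 4-byte partition, from the first buffer that arrives on a new channel; INITIAL_MESSAGE_SIZE (20 bytes) leaves headroom beyond the 12 bytes read here. A sketch of the matching encoder, with the decode done the way accept() does it:

import java.nio.ByteBuffer;

public class HandshakeSketch {
    static final int INITIAL_MESSAGE_SIZE = 20; // matches the constant above; 12 bytes used in this sketch

    static ByteBuffer encodeRequest(long jobId, int partition) {
        ByteBuffer b = ByteBuffer.allocate(INITIAL_MESSAGE_SIZE);
        b.putLong(jobId);    // 8 bytes, big-endian (ByteBuffer default)
        b.putInt(partition); // 4 bytes
        b.flip();
        return b;
    }

    public static void main(String[] args) {
        ByteBuffer b = encodeRequest(42L, 3);
        // Decode in the same order InitialBufferAcceptor.accept() reads the fields
        System.out.println("jobId=" + b.getLong() + " partition=" + b.getInt());
    }
}
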
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkInputChannel.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkInputChannel.java
deleted file mode 100644
index 1d5af84..0000000
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkInputChannel.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.control.nc.net;
-
-import java.net.SocketAddress;
-import java.nio.ByteBuffer;
-import java.util.ArrayDeque;
-import java.util.Queue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.hyracks.api.channels.IInputChannel;
-import edu.uci.ics.hyracks.api.channels.IInputChannelMonitor;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.partitions.PartitionId;
-import edu.uci.ics.hyracks.net.buffers.IBufferAcceptor;
-import edu.uci.ics.hyracks.net.buffers.ICloseableBufferAcceptor;
-import edu.uci.ics.hyracks.net.protocols.muxdemux.ChannelControlBlock;
-
-public class NetworkInputChannel implements IInputChannel {
-    private static final Logger LOGGER = Logger.getLogger(NetworkInputChannel.class.getName());
-
-    private final NetworkManager netManager;
-
-    private final SocketAddress remoteAddress;
-
-    private final PartitionId partitionId;
-
-    private final Queue<ByteBuffer> fullQueue;
-
-    private final int nBuffers;
-
-    private ChannelControlBlock ccb;
-
-    private IInputChannelMonitor monitor;
-
-    private Object attachment;
-
-    public NetworkInputChannel(NetworkManager netManager, SocketAddress remoteAddress, PartitionId partitionId,
-            int nBuffers) {
-        this.netManager = netManager;
-        this.remoteAddress = remoteAddress;
-        this.partitionId = partitionId;
-        fullQueue = new ArrayDeque<ByteBuffer>(nBuffers);
-        this.nBuffers = nBuffers;
-    }
-
-    @Override
-    public void registerMonitor(IInputChannelMonitor monitor) {
-        this.monitor = monitor;
-    }
-
-    @Override
-    public void setAttachment(Object attachment) {
-        this.attachment = attachment;
-    }
-
-    @Override
-    public Object getAttachment() {
-        return attachment;
-    }
-
-    @Override
-    public synchronized ByteBuffer getNextBuffer() {
-        return fullQueue.poll();
-    }
-
-    @Override
-    public void recycleBuffer(ByteBuffer buffer) {
-        buffer.clear();
-        ccb.getReadInterface().getEmptyBufferAcceptor().accept(buffer);
-    }
-
-    @Override
-    public void open(IHyracksTaskContext ctx) throws HyracksDataException {
-        try {
-            ccb = netManager.connect(remoteAddress);
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        }
-        ccb.getReadInterface().setFullBufferAcceptor(new ReadFullBufferAcceptor());
-        ccb.getWriteInterface().setEmptyBufferAcceptor(new WriteEmptyBufferAcceptor());
-        for (int i = 0; i < nBuffers; ++i) {
-            ccb.getReadInterface().getEmptyBufferAcceptor().accept(ctx.allocateFrame());
-        }
-        ByteBuffer writeBuffer = ByteBuffer.allocate(NetworkManager.INITIAL_MESSAGE_SIZE);
-        writeBuffer.putLong(partitionId.getJobId().getId());
-        writeBuffer.putInt(partitionId.getConnectorDescriptorId().getId());
-        writeBuffer.putInt(partitionId.getSenderIndex());
-        writeBuffer.putInt(partitionId.getReceiverIndex());
-        writeBuffer.flip();
-        if (LOGGER.isLoggable(Level.FINE)) {
-            LOGGER.fine("Sending partition request: " + partitionId + " on channel: " + ccb);
-        }
-        ccb.getWriteInterface().getFullBufferAcceptor().accept(writeBuffer);
-        ccb.getWriteInterface().getFullBufferAcceptor().close();
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-
-    }
-
-    private class ReadFullBufferAcceptor implements ICloseableBufferAcceptor {
-        @Override
-        public void accept(ByteBuffer buffer) {
-            fullQueue.add(buffer);
-            monitor.notifyDataAvailability(NetworkInputChannel.this, 1);
-        }
-
-        @Override
-        public void close() {
-            monitor.notifyEndOfStream(NetworkInputChannel.this);
-        }
-
-        @Override
-        public void error(int ecode) {
-            monitor.notifyFailure(NetworkInputChannel.this);
-        }
-    }
-
-    private class WriteEmptyBufferAcceptor implements IBufferAcceptor {
-        @Override
-        public void accept(ByteBuffer buffer) {
-            // do nothing
-        }
-    }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkManager.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkManager.java
index b805595..c8e4e94 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkManager.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkManager.java
@@ -27,6 +27,8 @@
 import edu.uci.ics.hyracks.api.exceptions.HyracksException;
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.api.partitions.PartitionId;
+import edu.uci.ics.hyracks.comm.channels.IChannelConnectionFactory;
+import edu.uci.ics.hyracks.comm.channels.NetworkOutputChannel;
 import edu.uci.ics.hyracks.control.nc.partitions.PartitionManager;
 import edu.uci.ics.hyracks.net.buffers.ICloseableBufferAcceptor;
 import edu.uci.ics.hyracks.net.exceptions.NetException;
@@ -36,7 +38,7 @@
 import edu.uci.ics.hyracks.net.protocols.muxdemux.MuxDemux;
 import edu.uci.ics.hyracks.net.protocols.muxdemux.MuxDemuxPerformanceCounters;
 
-public class NetworkManager {
+public class NetworkManager implements IChannelConnectionFactory {
     private static final Logger LOGGER = Logger.getLogger(NetworkManager.class.getName());
 
     private static final int MAX_CONNECTION_ATTEMPTS = 5;
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkOutputChannel.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkOutputChannel.java
deleted file mode 100644
index 9024e18..0000000
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/NetworkOutputChannel.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.control.nc.net;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayDeque;
-import java.util.Deque;
-
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.net.buffers.IBufferAcceptor;
-import edu.uci.ics.hyracks.net.protocols.muxdemux.ChannelControlBlock;
-
-public class NetworkOutputChannel implements IFrameWriter {
-    private final ChannelControlBlock ccb;
-
-    private final int nBuffers;
-
-    private final Deque<ByteBuffer> emptyStack;
-
-    private boolean aborted;
-
-    public NetworkOutputChannel(ChannelControlBlock ccb, int nBuffers) {
-        this.ccb = ccb;
-        this.nBuffers = nBuffers;
-        emptyStack = new ArrayDeque<ByteBuffer>(nBuffers);
-        ccb.getWriteInterface().setEmptyBufferAcceptor(new WriteEmptyBufferAcceptor());
-    }
-
-    public void setTaskContext(IHyracksTaskContext ctx) {
-        for (int i = 0; i < nBuffers; ++i) {
-            emptyStack.push(ByteBuffer.allocateDirect(ctx.getFrameSize()));
-        }
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        ByteBuffer destBuffer = null;
-        synchronized (this) {
-            while (true) {
-                if (aborted) {
-                    throw new HyracksDataException("Connection has been aborted");
-                }
-                destBuffer = emptyStack.poll();
-                if (destBuffer != null) {
-                    break;
-                }
-                try {
-                    wait();
-                } catch (InterruptedException e) {
-                    throw new HyracksDataException(e);
-                }
-            }
-        }
-        buffer.position(0);
-        buffer.limit(destBuffer.capacity());
-        destBuffer.clear();
-        destBuffer.put(buffer);
-        destBuffer.flip();
-        ccb.getWriteInterface().getFullBufferAcceptor().accept(destBuffer);
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        ccb.getWriteInterface().getFullBufferAcceptor().error(1);
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        ccb.getWriteInterface().getFullBufferAcceptor().close();
-    }
-
-    void abort() {
-        ccb.getWriteInterface().getFullBufferAcceptor().error(1);
-        synchronized (NetworkOutputChannel.this) {
-            aborted = true;
-            NetworkOutputChannel.this.notifyAll();
-        }
-    }
-
-    private class WriteEmptyBufferAcceptor implements IBufferAcceptor {
-        @Override
-        public void accept(ByteBuffer buffer) {
-            synchronized (NetworkOutputChannel.this) {
-                emptyStack.push(buffer);
-                NetworkOutputChannel.this.notifyAll();
-            }
-        }
-    }
-}
\ No newline at end of file
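
The deleted NetworkOutputChannel (re-homed in the comm.channels package, per the import changes elsewhere in this patch) implements a bounded handoff: nextFrame() blocks until the remote side recycles an empty buffer or the channel is aborted. The core wait/notify discipline, reduced to a standalone sketch with illustrative names:

import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.Deque;

public class BufferPoolSketch {
    private final Deque<ByteBuffer> empty = new ArrayDeque<ByteBuffer>();
    private boolean aborted;

    public synchronized ByteBuffer take() throws InterruptedException {
        while (!aborted && empty.isEmpty()) {
            wait(); // block until a buffer is recycled or the channel aborts
        }
        if (aborted) {
            throw new IllegalStateException("Connection has been aborted");
        }
        return empty.poll();
    }

    public synchronized void recycle(ByteBuffer b) { // reader returns an empty buffer
        empty.push(b);
        notifyAll();
    }

    public synchronized void abort() { // fail the channel and wake any blocked writer
        aborted = true;
        notifyAll();
    }

    public static void main(String[] args) throws InterruptedException {
        BufferPoolSketch pool = new BufferPoolSketch();
        pool.recycle(ByteBuffer.allocate(4));
        System.out.println(pool.take().capacity()); // 4
    }
}
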
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartitionInputChannel.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartitionInputChannel.java
index 16e31f7..ba6e6c3 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartitionInputChannel.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/MaterializedPartitionInputChannel.java
@@ -21,7 +21,7 @@
 import edu.uci.ics.hyracks.api.channels.IInputChannel;
 import edu.uci.ics.hyracks.api.channels.IInputChannelMonitor;
 import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.context.IHyracksCommonContext;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.api.partitions.IPartition;
 import edu.uci.ics.hyracks.api.partitions.PartitionId;
@@ -82,7 +82,7 @@
     }
 
     @Override
-    public void open(IHyracksTaskContext ctx) throws HyracksDataException {
+    public void open(IHyracksCommonContext ctx) throws HyracksDataException {
         for (int i = 0; i < nBuffers; ++i) {
             emptyQueue.add(ctx.allocateFrame());
         }
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/PartitionManager.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/PartitionManager.java
index 45c091a..ea88a75 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/PartitionManager.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/partitions/PartitionManager.java
@@ -28,12 +28,12 @@
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.api.partitions.IPartition;
 import edu.uci.ics.hyracks.api.partitions.PartitionId;
+import edu.uci.ics.hyracks.comm.channels.NetworkOutputChannel;
 import edu.uci.ics.hyracks.control.common.job.PartitionDescriptor;
 import edu.uci.ics.hyracks.control.common.job.PartitionState;
 import edu.uci.ics.hyracks.control.nc.NodeControllerService;
 import edu.uci.ics.hyracks.control.nc.io.IOManager;
 import edu.uci.ics.hyracks.control.nc.io.WorkspaceFileFactory;
-import edu.uci.ics.hyracks.control.nc.net.NetworkOutputChannel;
 import edu.uci.ics.hyracks.control.nc.resources.DefaultDeallocatableRegistry;
 
 public class PartitionManager {
@@ -98,7 +98,7 @@
         List<IPartition> pList = partitionMap.get(partitionId);
         if (pList != null && !pList.isEmpty()) {
             IPartition partition = pList.get(0);
-            writer.setTaskContext(partition.getTaskContext());
+            writer.setFrameSize(partition.getTaskContext().getFrameSize());
             partition.writeTo(writer);
             if (!partition.isReusable()) {
                 partitionMap.remove(partitionId);
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java
index bb9669d..7ed9d11 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java
@@ -22,10 +22,10 @@
 import edu.uci.ics.hyracks.api.comm.PartitionChannel;
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.api.partitions.PartitionId;
+import edu.uci.ics.hyracks.comm.channels.NetworkInputChannel;
 import edu.uci.ics.hyracks.control.common.work.AbstractWork;
 import edu.uci.ics.hyracks.control.nc.Joblet;
 import edu.uci.ics.hyracks.control.nc.NodeControllerService;
-import edu.uci.ics.hyracks.control.nc.net.NetworkInputChannel;
 
 public class ReportPartitionAvailabilityWork extends AbstractWork {
     private final NodeControllerService ncs;
diff --git a/hyracks/hyracks-data/hyracks-data-std/pom.xml b/hyracks/hyracks-data/hyracks-data-std/pom.xml
index 3f051f9..8f5f04e 100644
--- a/hyracks/hyracks-data/hyracks-data-std/pom.xml
+++ b/hyracks/hyracks-data/hyracks-data-std/pom.xml
@@ -16,8 +16,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/RawUTF8StringPointable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/RawUTF8StringPointable.java
new file mode 100644
index 0000000..c90ce5a
--- /dev/null
+++ b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/RawUTF8StringPointable.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.data.std.primitive;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.api.AbstractPointable;
+import edu.uci.ics.hyracks.data.std.api.IComparable;
+import edu.uci.ics.hyracks.data.std.api.IHashable;
+import edu.uci.ics.hyracks.data.std.api.IPointable;
+import edu.uci.ics.hyracks.data.std.api.IPointableFactory;
+
+/**
+ * This class provides raw byte-based comparison and hash functions for UTF-8 strings.
+ * Note that the comparison may not produce the correct ordering for languages whose
+ * characters encode to 2 or 3 bytes, but it is correct for single-byte characters.
+ */
+public final class RawUTF8StringPointable extends AbstractPointable implements IHashable, IComparable {
+    public static final ITypeTraits TYPE_TRAITS = new ITypeTraits() {
+        private static final long serialVersionUID = 1L;
+
+        @Override
+        public boolean isFixedLength() {
+            return false;
+        }
+
+        @Override
+        public int getFixedLength() {
+            return 0;
+        }
+    };
+
+    public static final IPointableFactory FACTORY = new IPointableFactory() {
+        private static final long serialVersionUID = 1L;
+
+        @Override
+        public IPointable createPointable() {
+            return new RawUTF8StringPointable();
+        }
+
+        @Override
+        public ITypeTraits getTypeTraits() {
+            return TYPE_TRAITS;
+        }
+    };
+
+    @Override
+    public int compareTo(IPointable pointer) {
+        return compareTo(pointer.getByteArray(), pointer.getStartOffset(), pointer.getLength());
+    }
+
+    @Override
+    public int compareTo(byte[] bytes, int start, int length) {
+        int utflen1 = UTF8StringPointable.getUTFLength(this.bytes, this.start);
+        int utflen2 = UTF8StringPointable.getUTFLength(bytes, start);
+
+        int c1 = 0;
+        int c2 = 0;
+
+        int s1Start = this.start + 2;
+        int s2Start = start + 2;
+
+        while (c1 < utflen1 && c2 < utflen2) {
+            char ch1 = (char) this.bytes[s1Start + c1];
+            char ch2 = (char) bytes[s2Start + c2];
+
+            if (ch1 != ch2) {
+                return ch1 - ch2;
+            }
+            c1++;
+            c2++;
+        }
+        return utflen1 - utflen2;
+    }
+
+    @Override
+    public int hash() {
+        int h = 0;
+        int utflen = UTF8StringPointable.getUTFLength(bytes, start);
+        int sStart = start + 2;
+        int c = 0;
+
+        while (c < utflen) {
+            char ch = (char) bytes[sStart + c];
+            h = 31 * h + ch;
+            c++;
+        }
+        return h;
+    }
+
+    public void toString(StringBuilder buffer) {
+        UTF8StringPointable.toString(buffer, bytes, start);
+    }
+}
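
compareTo() above walks the encoded bytes directly, skipping the 2-byte UTF length header, and the class javadoc warns the result is only guaranteed to match code-point order for single-byte characters. A standalone re-creation of the byte walk over DataOutputStream.writeUTF output (a sketch of the idea, not the pointable API):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class RawCompareSketch {
    // Byte-wise comparison over the payload of a modified-UTF-8 encoding,
    // skipping the 2-byte big-endian length header, as the pointable does.
    static int rawCompare(byte[] a, byte[] b) {
        int la = ((a[0] & 0xff) << 8) | (a[1] & 0xff);
        int lb = ((b[0] & 0xff) << 8) | (b[1] & 0xff);
        for (int i = 0; i < Math.min(la, lb); i++) {
            char c1 = (char) a[2 + i];
            char c2 = (char) b[2 + i];
            if (c1 != c2) {
                return c1 - c2;
            }
        }
        return la - lb;
    }

    static byte[] utf(String s) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        new DataOutputStream(baos).writeUTF(s); // 2-byte length + modified UTF-8
        return baos.toByteArray();
    }

    public static void main(String[] args) throws IOException {
        System.out.println(rawCompare(utf("abc"), utf("abd")) < 0); // true: ASCII order matches code-point order
        System.out.println(rawCompare(utf("abc"), utf("ab")) > 0);  // true: on a shared prefix, the longer string sorts last
    }
}
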
diff --git a/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/UTF8StringPointable.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/UTF8StringPointable.java
index f6d6093..866ebb0 100644
--- a/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/UTF8StringPointable.java
+++ b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/primitive/UTF8StringPointable.java
@@ -216,4 +216,4 @@
     public void toString(StringBuilder buffer) {
         toString(buffer, bytes, start);
     }
-}
\ No newline at end of file
+}
diff --git a/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ArrayBackedValueStorage.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ArrayBackedValueStorage.java
index e8dc9b4..7d7d2c1 100644
--- a/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ArrayBackedValueStorage.java
+++ b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ArrayBackedValueStorage.java
@@ -1,34 +1,28 @@
 package edu.uci.ics.hyracks.data.std.util;
 
 import java.io.DataOutput;
-import java.io.DataOutputStream;
 import java.io.IOException;
 
 import edu.uci.ics.hyracks.data.std.api.IMutableValueStorage;
 import edu.uci.ics.hyracks.data.std.api.IValueReference;
 
 public class ArrayBackedValueStorage implements IMutableValueStorage {
-    private final ByteArrayAccessibleOutputStream baaos;
-    private final DataOutputStream dos;
-
-    public ArrayBackedValueStorage() {
-        baaos = new ByteArrayAccessibleOutputStream();
-        dos = new DataOutputStream(baaos);
-    }
+
+    private final GrowableArray data = new GrowableArray();
 
     @Override
     public void reset() {
-        baaos.reset();
+        data.reset();
     }
 
     @Override
     public DataOutput getDataOutput() {
-        return dos;
+        return data.getDataOutput();
     }
 
     @Override
     public byte[] getByteArray() {
-        return baaos.getByteArray();
+        return data.getByteArray();
     }
 
     @Override
@@ -38,12 +32,12 @@
 
     @Override
     public int getLength() {
-        return baaos.size();
+        return data.getLength();
     }
 
     public void append(IValueReference value) {
         try {
-            dos.write(value.getByteArray(), value.getStartOffset(), value.getLength());
+            data.append(value);
         } catch (IOException e) {
             e.printStackTrace();
         }
diff --git a/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ByteArrayAccessibleOutputStream.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ByteArrayAccessibleOutputStream.java
index 8508287..c0ba163 100644
--- a/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ByteArrayAccessibleOutputStream.java
+++ b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/ByteArrayAccessibleOutputStream.java
@@ -21,6 +21,14 @@
 public class ByteArrayAccessibleOutputStream extends ByteArrayOutputStream {
     private static final Logger LOGGER = Logger.getLogger(ByteArrayAccessibleOutputStream.class.getName());
 
+    public ByteArrayAccessibleOutputStream() {
+        super();
+    }
+
+    public ByteArrayAccessibleOutputStream(int size) {
+        super(size);
+    }
+
     public byte[] getByteArray() {
         return buf;
     }
diff --git a/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/GrowableArray.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/GrowableArray.java
new file mode 100644
index 0000000..c174d4e
--- /dev/null
+++ b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/util/GrowableArray.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.data.std.util;
+
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.api.IValueReference;
+
+public class GrowableArray implements IDataOutputProvider {
+    private final ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
+    private final DataOutputStream dos = new DataOutputStream(baaos);
+
+    @Override
+    public DataOutput getDataOutput() {
+        return dos;
+    }
+
+    public void reset() {
+        baaos.reset();
+    }
+
+    public byte[] getByteArray() {
+        return baaos.getByteArray();
+    }
+
+    public int getLength() {
+        return baaos.size();
+    }
+
+    public void append(IValueReference value) throws IOException {
+        dos.write(value.getByteArray(), value.getStartOffset(), value.getLength());
+    }
+}
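
GrowableArray is the new shared building block behind ArrayBackedValueStorage and ArrayTupleBuilder: a growable byte store paired with a DataOutput view onto it. A usage sketch (assumes hyracks-data-std on the classpath):

import java.io.IOException;

import edu.uci.ics.hyracks.data.std.util.GrowableArray;

public class GrowableArraySketch {
    public static void main(String[] args) throws IOException {
        GrowableArray data = new GrowableArray();
        data.getDataOutput().writeInt(7);        // 4 bytes
        data.getDataOutput().writeUTF("hello");  // 2-byte length + 5 bytes
        System.out.println(data.getLength());    // 11
        data.reset();                             // reuse the same backing buffer
        System.out.println(data.getLength());    // 0
    }
}
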
diff --git a/hyracks/hyracks-dataflow-common/pom.xml b/hyracks/hyracks-dataflow-common/pom.xml
index a0ffb66..1a2950b 100644
--- a/hyracks/hyracks-dataflow-common/pom.xml
+++ b/hyracks/hyracks-dataflow-common/pom.xml
@@ -15,8 +15,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ArrayTupleBuilder.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ArrayTupleBuilder.java
index 989bc6b..8c865c4 100644
--- a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ArrayTupleBuilder.java
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ArrayTupleBuilder.java
@@ -15,14 +15,13 @@
 package edu.uci.ics.hyracks.dataflow.common.comm.io;
 
 import java.io.DataOutput;
-import java.io.DataOutputStream;
 import java.io.IOException;
 
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
-import edu.uci.ics.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
 
 /**
  * Array backed tuple builder.
@@ -30,25 +29,21 @@
  * @author vinayakb
  */
 public class ArrayTupleBuilder implements IDataOutputProvider {
-    private final ByteArrayAccessibleOutputStream baaos;
-    private final DataOutputStream dos;
+    private final GrowableArray fieldData = new GrowableArray();
     private final int[] fEndOffsets;
     private int nextField;
 
     public ArrayTupleBuilder(int nFields) {
-        baaos = new ByteArrayAccessibleOutputStream();
-        dos = new DataOutputStream(baaos);
         fEndOffsets = new int[nFields];
     }
 
     /**
      * Resets the builder.
-     * 
      * reset() must be called before attempting to create a new tuple.
      */
     public void reset() {
         nextField = 0;
-        baaos.reset();
+        fieldData.reset();
     }
 
     /**
@@ -66,7 +61,7 @@
      * @return Data byte array.
      */
     public byte[] getByteArray() {
-        return baaos.getByteArray();
+        return fieldData.getByteArray();
     }
 
     /**
@@ -75,7 +70,7 @@
      * @return data area size.
      */
     public int getSize() {
-        return baaos.size();
+        return fieldData.getLength();
     }
 
     /**
@@ -96,14 +91,15 @@
         int fStartOffset = accessor.getFieldStartOffset(tIndex, fIndex);
         int fLen = accessor.getFieldEndOffset(tIndex, fIndex) - fStartOffset;
         try {
-            dos.write(accessor.getBuffer().array(), startOffset + accessor.getFieldSlotsLength() + fStartOffset, fLen);
+            fieldData.getDataOutput().write(accessor.getBuffer().array(),
+                    startOffset + accessor.getFieldSlotsLength() + fStartOffset, fLen);
             if (FrameConstants.DEBUG_FRAME_IO) {
-                dos.writeInt(FrameConstants.FRAME_FIELD_MAGIC);
+                fieldData.getDataOutput().writeInt(FrameConstants.FRAME_FIELD_MAGIC);
             }
         } catch (IOException e) {
             throw new HyracksDataException(e);
         }
-        fEndOffsets[nextField++] = baaos.size();
+        fEndOffsets[nextField++] = fieldData.getLength();
     }
 
     /**
@@ -117,8 +113,8 @@
      * @throws HyracksDataException
      */
     public <T> void addField(ISerializerDeserializer<T> serDeser, T instance) throws HyracksDataException {
-        serDeser.serialize(instance, dos);
-        fEndOffsets[nextField++] = baaos.size();
+        serDeser.serialize(instance, fieldData.getDataOutput());
+        fEndOffsets[nextField++] = fieldData.getLength();
     }
 
     /**
@@ -134,11 +130,11 @@
      */
     public void addField(byte[] bytes, int start, int length) throws HyracksDataException {
         try {
-            dos.write(bytes, start, length);
+            fieldData.getDataOutput().write(bytes, start, length);
         } catch (IOException e) {
             throw new HyracksDataException(e);
         }
-        fEndOffsets[nextField++] = baaos.size();
+        fEndOffsets[nextField++] = fieldData.getLength();
     }
 
     /**
@@ -146,7 +142,14 @@
      */
     @Override
     public DataOutput getDataOutput() {
-        return dos;
+        return fieldData.getDataOutput();
+    }
+
+    /**
+     * Get the growable array storing the field data.
+     */
+    public GrowableArray getFieldData() {
+        return fieldData;
     }
 
     /**
@@ -156,6 +159,6 @@
      * data.
      */
     public void addFieldEndOffset() {
-        fEndOffsets[nextField++] = baaos.size();
+        fEndOffsets[nextField++] = fieldData.getLength();
     }
 }
\ No newline at end of file
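
With the GrowableArray refactor, ArrayTupleBuilder keeps per-field end offsets over a single growable data area; frame appenders then consume the offsets, byte array, and size together. A usage sketch building a two-field tuple from raw bytes (assumes hyracks-dataflow-common on the classpath; the accessor methods beyond those shown in this hunk follow the upstream class):

import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;

public class TupleBuilderSketch {
    public static void main(String[] args) throws HyracksDataException {
        ArrayTupleBuilder tb = new ArrayTupleBuilder(2); // two fields per tuple
        tb.reset();                                      // required before each new tuple
        tb.addField(new byte[] { 1, 2, 3 }, 0, 3);       // field 0: 3 bytes
        tb.addField(new byte[] { 9 }, 0, 1);             // field 1: 1 byte
        System.out.println(tb.getSize());                // 4: total bytes of field data
    }
}
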
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameOutputStream.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameOutputStream.java
new file mode 100644
index 0000000..07f6ba2
--- /dev/null
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameOutputStream.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.dataflow.common.comm.io;
+
+import java.nio.ByteBuffer;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
+
+public class FrameOutputStream extends ByteArrayAccessibleOutputStream {
+    private static final Logger LOGGER = Logger.getLogger(FrameOutputStream.class.getName());
+
+    private final FrameTupleAppender frameTupleAppender;
+
+    public FrameOutputStream(int frameSize) {
+        super(frameSize);
+        this.frameTupleAppender = new FrameTupleAppender(frameSize);
+    }
+
+    public void reset(ByteBuffer buffer, boolean clear) {
+        frameTupleAppender.reset(buffer, clear);
+    }
+
+    public int getTupleCount() {
+        int tupleCount = frameTupleAppender.getTupleCount();
+        if (LOGGER.isLoggable(Level.FINEST)) {
+            LOGGER.finest("getTupleCount(): tuple count: " + tupleCount);
+        }
+        return tupleCount;
+    }
+
+    public boolean appendTuple() {
+        if (LOGGER.isLoggable(Level.FINEST)) {
+            LOGGER.finest("appendTuple(): tuple size: " + count);
+        }
+        boolean appended = frameTupleAppender.append(buf, 0, count);
+        count = 0;
+        return appended;
+    }
+}
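
FrameOutputStream lets a caller stream bytes through the OutputStream API and then pack everything written so far into the current frame as a single tuple. A usage sketch (assumes hyracks-dataflow-common on the classpath; the frame size is illustrative):

import java.io.IOException;
import java.nio.ByteBuffer;

import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameOutputStream;

public class FrameOutputStreamSketch {
    public static void main(String[] args) throws IOException {
        int frameSize = 32768;
        FrameOutputStream fos = new FrameOutputStream(frameSize);
        fos.reset(ByteBuffer.allocate(frameSize), true); // point the appender at a fresh frame
        fos.write("hello".getBytes("UTF-8"));            // buffer the tuple bytes
        boolean fit = fos.appendTuple();                 // move them into the frame as one tuple
        System.out.println(fit + " tuples=" + fos.getTupleCount());
    }
}
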
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ResultFrameTupleAccessor.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ResultFrameTupleAccessor.java
new file mode 100644
index 0000000..915a436
--- /dev/null
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ResultFrameTupleAccessor.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.dataflow.common.comm.io;
+
+import java.io.DataInputStream;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.FrameHelper;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+
+public class ResultFrameTupleAccessor implements IFrameTupleAccessor {
+
+    private final int frameSize;
+    private ByteBuffer buffer;
+
+    public ResultFrameTupleAccessor(int frameSize) {
+        this.frameSize = frameSize;
+    }
+
+    @Override
+    public void reset(ByteBuffer buffer) {
+        this.buffer = buffer;
+    }
+
+    @Override
+    public ByteBuffer getBuffer() {
+        return buffer;
+    }
+
+    @Override
+    public int getTupleCount() {
+        return buffer.getInt(FrameHelper.getTupleCountOffset(frameSize));
+    }
+
+    @Override
+    public int getTupleStartOffset(int tupleIndex) {
+        return tupleIndex == 0 ? 0 : buffer.getInt(FrameHelper.getTupleCountOffset(frameSize) - 4 * tupleIndex);
+    }
+
+    @Override
+    public int getTupleEndOffset(int tupleIndex) {
+        return buffer.getInt(FrameHelper.getTupleCountOffset(frameSize) - 4 * (tupleIndex + 1));
+    }
+
+    @Override
+    public int getFieldStartOffset(int tupleIndex, int fIdx) {
+        return fIdx == 0 ? 0 : buffer.getInt(getTupleStartOffset(tupleIndex) + (fIdx - 1) * 4);
+    }
+
+    @Override
+    public int getFieldEndOffset(int tupleIndex, int fIdx) {
+        return buffer.getInt(getTupleStartOffset(tupleIndex) + fIdx * 4);
+    }
+
+    @Override
+    public int getFieldLength(int tupleIndex, int fIdx) {
+        return getFieldEndOffset(tupleIndex, fIdx) - getFieldStartOffset(tupleIndex, fIdx);
+    }
+
+    @Override
+    public int getFieldSlotsLength() {
+        return getFieldCount() * 4;
+    }
+
+    public void prettyPrint() {
+        ByteBufferInputStream bbis = new ByteBufferInputStream();
+        DataInputStream dis = new DataInputStream(bbis);
+        int tc = getTupleCount();
+        System.err.println("TC: " + tc);
+        for (int i = 0; i < tc; ++i) {
+            System.err.print(i + ":(" + getTupleStartOffset(i) + ", " + getTupleEndOffset(i) + ")[");
+
+            bbis.setByteBuffer(buffer, getTupleStartOffset(i));
+            System.err.print(dis);
+
+            System.err.println("]");
+        }
+    }
+
+    @Override
+    public int getFieldCount() {
+        return 1;
+    }
+}
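
ResultFrameTupleAccessor reads the standard Hyracks frame layout: tuple data grows from the front of the frame, a 4-byte tuple count sits at the tail (FrameHelper.getTupleCountOffset, assumed here to be frameSize - 4), the i-th tuple's end offset is stored 4*(i+1) bytes before the count, and each single-field tuple carries one 4-byte field end offset ahead of its data. A sketch that builds such a frame by hand and reads it back with plain ByteBuffer arithmetic:

import java.nio.ByteBuffer;

public class ResultFrameSketch {
    public static void main(String[] args) {
        int frameSize = 256;
        ByteBuffer frame = ByteBuffer.allocate(frameSize);

        // One tuple with a single 5-byte field: [field end offset][field bytes]
        byte[] payload = "hello".getBytes();
        frame.putInt(0, payload.length);              // field 0 ends 5 bytes into the field data
        for (int i = 0; i < payload.length; i++) {
            frame.put(4 + i, payload[i]);
        }
        int tupleEnd = 4 + payload.length;

        int tupleCountOffset = frameSize - 4;         // assumed FrameHelper.getTupleCountOffset(frameSize)
        frame.putInt(tupleCountOffset, 1);            // tuple count at the tail
        frame.putInt(tupleCountOffset - 4, tupleEnd); // end offset of tuple 0

        // Read back the way the accessor does
        int tc = frame.getInt(tupleCountOffset);
        int end = frame.getInt(tupleCountOffset - 4); // tuple 0 starts at 0
        int fieldEnd = frame.getInt(0);               // single-field tuple
        System.out.println("tuples=" + tc + " tupleBytes=" + end + " fieldBytes=" + fieldEnd);
    }
}
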
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/DoubleNormalizedKeyComputerFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/DoubleNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..e95e9c2
--- /dev/null
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/DoubleNormalizedKeyComputerFactory.java
@@ -0,0 +1,28 @@
+package edu.uci.ics.hyracks.dataflow.common.data.normalizers;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+
+public class DoubleNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                int prefix = IntegerSerializerDeserializer.getInt(bytes, start);
+                if (prefix >= 0) {
+                    return prefix ^ Integer.MIN_VALUE;
+                } else {
+                    return (int) ((long) 0xffffffff - (long) prefix);
+                }
+            }
+
+        };
+    }
+
+}
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/FloatNormalizedKeyComputerFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/FloatNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..d58afc1
--- /dev/null
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/FloatNormalizedKeyComputerFactory.java
@@ -0,0 +1,28 @@
+package edu.uci.ics.hyracks.dataflow.common.data.normalizers;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+
+public class FloatNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                int prefix = IntegerSerializerDeserializer.getInt(bytes, start);
+                if (prefix >= 0) {
+                    return prefix ^ Integer.MIN_VALUE;
+                } else {
+                    return (int) ((long) 0xffffffff - (long) prefix);
+                }
+            }
+
+        };
+    }
+
+}
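
Both normalizer factories above (double and float) read the first four big-endian bytes of the serialized value as an int prefix and exploit the IEEE-754 layout: for a non-negative bit pattern, prefix ^ Integer.MIN_VALUE sets the sign bit; for a negative pattern, (long) 0xffffffff - (long) prefix equals ~prefix, flipping every bit so that more-negative values map lower. A self-contained check of the float case, assuming (as elsewhere in Hyracks) that normalized keys are compared as unsigned ints:

public class FloatKeySketch {
    // Order-preserving key from the raw bits of a float, mirroring the normalizer:
    // flip the sign bit for non-negatives, flip all bits for negatives.
    static int normalize(float f) {
        int prefix = Float.floatToIntBits(f);
        if (prefix >= 0) {
            return prefix ^ Integer.MIN_VALUE;
        } else {
            return (int) ((long) 0xffffffff - (long) prefix); // == ~prefix
        }
    }

    // Compare as unsigned 32-bit values, as the sort machinery does with normalized keys.
    static int cmpUnsigned(int a, int b) {
        return Long.compare(a & 0xffffffffL, b & 0xffffffffL);
    }

    public static void main(String[] args) {
        float[] vals = { -2.5f, -0.0f, 0.0f, 1.0f, 3.5f };
        for (int i = 0; i + 1 < vals.length; i++) {
            System.out.println(cmpUnsigned(normalize(vals[i]), normalize(vals[i + 1])) <= 0); // true
        }
    }
}
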
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/Integer64NormalizedKeyComputerFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/Integer64NormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..4589909
--- /dev/null
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/Integer64NormalizedKeyComputerFactory.java
@@ -0,0 +1,55 @@
+package edu.uci.ics.hyracks.dataflow.common.data.normalizers;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.Integer64SerializerDeserializer;
+
+public class Integer64NormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+    private static final long serialVersionUID = 8735044913496854551L;
+
+    @Override
+    public INormalizedKeyComputer createNormalizedKeyComputer() {
+        return new INormalizedKeyComputer() {
+            private static final int POSITIVE_LONG_MASK = (3 << 30);
+            private static final int NON_NEGATIVE_INT_MASK = (2 << 30);
+            private static final int NEGATIVE_LONG_MASK = (0 << 30);
+
+            @Override
+            public int normalize(byte[] bytes, int start, int length) {
+                long value = Integer64SerializerDeserializer.getLong(bytes, start);
+                int highValue = (int) (value >> 32);
+                if (highValue > 0) {
+                    /**
+                     * larger than Integer.MAX_VALUE
+                     */
+                    int highNmk = getKey(highValue);
+                    highNmk >>= 2;
+                    highNmk |= POSITIVE_LONG_MASK;
+                    return highNmk;
+                } else if (highValue == 0) {
+                    /**
+                     * high 32 bits are zero: 0 <= value < 2^32
+                     */
+                    int lowNmk = (int) value;
+                    lowNmk >>= 2;
+                    lowNmk |= NON_NEGATIVE_INT_MASK;
+                    return lowNmk;
+                } else {
+                    /**
+                     * negative: not specially optimized
+                     */
+                    int highNmk = getKey(highValue);
+                    highNmk >>= 2;
+                    highNmk |= NEGATIVE_LONG_MASK;
+                    return highNmk;
+                }
+            }
+
+            private int getKey(int value) {
+                return value ^ Integer.MIN_VALUE;
+            }
+
+        };
+    }
+}
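
The 64-bit normalizer above compresses a long into a 32-bit key: two tag bits separate negative values, values whose high word is zero, and values above the int range, and the remaining 30 bits carry a shifted slice of the relevant half. Collisions are expected; on equal keys the sort falls back to the full comparator. A sketch reproducing the mapping and spot-checking monotonicity on a few values:

public class Long64KeySketch {
    // Re-statement of the normalizer above for illustration: 2 tag bits
    // (00 negative high word, 10 zero high word, 11 positive high word)
    // followed by 30 bits taken from the relevant half of the value.
    static int normalize(long value) {
        int highValue = (int) (value >> 32);
        if (highValue > 0) {
            return ((highValue ^ Integer.MIN_VALUE) >> 2) | (3 << 30);
        } else if (highValue == 0) {
            return (((int) value) >> 2) | (2 << 30);
        } else {
            return ((highValue ^ Integer.MIN_VALUE) >> 2) | (0 << 30);
        }
    }

    public static void main(String[] args) {
        long[] vals = { Long.MIN_VALUE, -5L, 0L, 100L, Integer.MAX_VALUE, (1L << 32) + 5 };
        long prev = -1; // compare keys as unsigned 32-bit values
        for (long v : vals) {
            long key = normalize(v) & 0xffffffffL;
            System.out.printf("%20d -> %08x (monotone: %b)%n", v, key, key >= prev);
            prev = key;
        }
    }
}
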
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/IntegerNormalizedKeyComputerFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/IntegerNormalizedKeyComputerFactory.java
index 2f7a778..6a01842 100644
--- a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/IntegerNormalizedKeyComputerFactory.java
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/normalizers/IntegerNormalizedKeyComputerFactory.java
@@ -27,8 +27,7 @@
             @Override
             public int normalize(byte[] bytes, int start, int length) {
                 int value = IntegerSerializerDeserializer.getInt(bytes, start);
-                long unsignedValue = (long) value;
-                return (int) ((unsignedValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
+                return value ^ Integer.MIN_VALUE;
             }
         };
     }
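
The one-line rewrite above is the classic order-preserving trick: XOR-ing with Integer.MIN_VALUE flips the sign bit, so the key read as an unsigned 32-bit number increases exactly as the signed int does, which is what the earlier subtract-and-mask form computed. A quick check:

public class IntKeySketch {
    public static void main(String[] args) {
        int[] vals = { Integer.MIN_VALUE, -1, 0, 1, Integer.MAX_VALUE };
        long prev = -1;
        for (int v : vals) {
            long key = (v ^ Integer.MIN_VALUE) & 0xffffffffL; // unsigned view of the key
            System.out.printf("%11d -> %08x (increasing: %b)%n", v, key, key > prev);
            prev = key;
        }
    }
}
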
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/LongParserFactory.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/LongParserFactory.java
index 6a9dab7..d9191c7 100644
--- a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/LongParserFactory.java
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/parsers/LongParserFactory.java
@@ -32,7 +32,7 @@
         return new IValueParser() {
             @Override
             public void parse(char[] buffer, int start, int length, DataOutput out) throws HyracksDataException {
                 long n = 0;
                 int sign = 1;
                 int i = 0;
                 boolean pre = true;
@@ -102,7 +102,7 @@
                             throw new HyracksDataException("Encountered " + ch);
                     }
                 }
 
                 try {
                     out.writeLong(n * sign);
                 } catch (IOException e) {
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
index 51645c4..ec3c2be 100644
--- a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
@@ -52,10 +52,10 @@
                     h += fh;
                 }
                 if (h < 0) {
-                    h = -h;
+                    h = -(h + 1);
                 }
                 return h % nParts;
             }
         };
     }
-}
\ No newline at end of file
+}
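
The h = -(h + 1) change above closes an overflow corner in the partition computer: -h is a no-op when h == Integer.MIN_VALUE (two's-complement negation overflows), so the subsequent h % nParts could stay negative and yield an invalid partition index; -(h + 1) maps MIN_VALUE to MAX_VALUE and every other negative h into the non-negative range. A quick check:

public class HashFixSketch {
    public static void main(String[] args) {
        int nParts = 10;
        int h = Integer.MIN_VALUE;
        System.out.println((-h) % nParts);       // -8: -h overflowed back to MIN_VALUE
        System.out.println((-(h + 1)) % nParts); // 7: safely non-negative
    }
}
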
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/util/StringUtils.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/util/StringUtils.java
index a29209c..8a30a71 100644
--- a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/util/StringUtils.java
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/util/StringUtils.java
@@ -18,17 +18,19 @@
 import java.io.IOException;
 
 public class StringUtils {
-    public static void writeCharAsModifiedUTF8(char c, DataOutput dos) throws IOException {
-
+    public static int writeCharAsModifiedUTF8(char c, DataOutput dos) throws IOException {
         if (c >= 0x0000 && c <= 0x007F) {
             dos.writeByte(c);
+            return 1;
         } else if (c <= 0x07FF) {
             dos.writeByte((byte) (0xC0 | ((c >> 6) & 0x3F)));
             dos.writeByte((byte) (0x80 | (c & 0x3F)));
+            return 2;
         } else {
             dos.writeByte((byte) (0xE0 | ((c >> 12) & 0x0F)));
             dos.writeByte((byte) (0x80 | ((c >> 6) & 0x3F)));
             dos.writeByte((byte) (0x80 | (c & 0x3F)));
+            return 3;
         }
     }
 
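
writeCharAsModifiedUTF8 now reports how many bytes it emitted (1, 2, or 3), which lets callers accumulate the UTF length of a string without a second encoding pass. A sketch of that pattern (assumes hyracks-dataflow-common on the classpath):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;

public class Utf8LengthSketch {
    public static void main(String[] args) throws IOException {
        String s = "a\u00fc\u20ac"; // 1-, 2-, and 3-byte characters
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);
        int utfLen = 0;
        for (int i = 0; i < s.length(); i++) {
            utfLen += StringUtils.writeCharAsModifiedUTF8(s.charAt(i), dos);
        }
        System.out.println(utfLen + " == " + baos.size()); // 6 == 6
    }
}
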
diff --git a/hyracks/hyracks-dataflow-hadoop/pom.xml b/hyracks/hyracks-dataflow-hadoop/pom.xml
index f9d5153..f5135f8 100644
--- a/hyracks/hyracks-dataflow-hadoop/pom.xml
+++ b/hyracks/hyracks-dataflow-hadoop/pom.xml
@@ -1,8 +1,6 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
   <artifactId>hyracks-dataflow-hadoop</artifactId>
-  <version>0.2.3-SNAPSHOT</version>
   <name>hyracks-dataflow-hadoop</name>
 
   <parent>
@@ -18,8 +16,8 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-dataflow-std/pom.xml b/hyracks/hyracks-dataflow-std/pom.xml
index bf27d20..2cf0fdc 100644
--- a/hyracks/hyracks-dataflow-std/pom.xml
+++ b/hyracks/hyracks-dataflow-std/pom.xml
@@ -1,8 +1,6 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
   <artifactId>hyracks-dataflow-std</artifactId>
-  <version>0.2.3-SNAPSHOT</version>
   <name>hyracks-dataflow-std</name>
 
   <parent>
@@ -18,8 +16,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/HashSpillableTableFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/HashSpillableTableFactory.java
index f2b56fa..f86d9fb 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/HashSpillableTableFactory.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/group/HashSpillableTableFactory.java
@@ -256,8 +256,6 @@
 
                 outputAppender.reset(outputFrame, true);
 
-                writer.open();
-
                 if (tPointers == null) {
                     // Not sorted
                     for (int i = 0; i < tableSize; ++i) {
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoin.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoin.java
index 3e5e30f..d86f1d5 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoin.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoin.java
@@ -17,7 +17,6 @@
 import java.io.DataOutput;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.List;
 
 import edu.uci.ics.hyracks.api.comm.IFrameWriter;
@@ -47,11 +46,18 @@
     private final ISerializableTable table;
 	private final int tableSize;
     private final TuplePointer storedTuplePointer;
+    private final boolean reverseOutputOrder; //Whether to reverse the (probe, build) order of the tuples written to the output
     
     public InMemoryHashJoin(IHyracksTaskContext ctx, int tableSize, FrameTupleAccessor accessor0,
             ITuplePartitionComputer tpc0, FrameTupleAccessor accessor1, ITuplePartitionComputer tpc1,
             FrameTuplePairComparator comparator, boolean isLeftOuter, INullWriter[] nullWriters1, ISerializableTable table)
             throws HyracksDataException {
+        this(ctx, tableSize, accessor0, tpc0, accessor1, tpc1, comparator, isLeftOuter, nullWriters1, table, false);
+    }
+
+    public InMemoryHashJoin(IHyracksTaskContext ctx, int tableSize, FrameTupleAccessor accessor0,
+            ITuplePartitionComputer tpc0, FrameTupleAccessor accessor1, ITuplePartitionComputer tpc1,
+            FrameTuplePairComparator comparator, boolean isLeftOuter, INullWriter[] nullWriters1,
+            ISerializableTable table, boolean reverse) throws HyracksDataException {
     	this.tableSize = tableSize;
        	this.table = table;
        	storedTuplePointer = new TuplePointer();
@@ -76,6 +82,7 @@
         } else {
             nullTupleBuild = null;
         }
+        reverseOutputOrder = reverse;
     }
 
     public void build(ByteBuffer buffer) throws HyracksDataException {
@@ -108,18 +115,13 @@
                 int c = tpComparator.compare(accessorProbe, i, accessorBuild, tIndex);
                 if (c == 0) {
                     matchFound = true;
-                    if (!appender.appendConcat(accessorProbe, i, accessorBuild, tIndex)) {
-                        flushFrame(outBuffer, writer);
-                        appender.reset(outBuffer, true);
-                        if (!appender.appendConcat(accessorProbe, i, accessorBuild, tIndex)) {
-                            throw new IllegalStateException();
-                        }
-                    }
+                    appendToResult(i, tIndex, writer);
                 }
             } while (true);
 
             if (!matchFound && isLeftOuter) {
-                if (!appender.appendConcat(accessorProbe, i, nullTupleBuild.getFieldEndOffsets(),
+                
+            	if (!appender.appendConcat(accessorProbe, i, nullTupleBuild.getFieldEndOffsets(),
                         nullTupleBuild.getByteArray(), 0, nullTupleBuild.getSize())) {
                     flushFrame(outBuffer, writer);
                     appender.reset(outBuffer, true);
@@ -128,6 +130,7 @@
                         throw new IllegalStateException();
                     }
                 }
+                
             }
         }
     }
@@ -145,23 +148,25 @@
         buffer.position(0);
         buffer.limit(buffer.capacity());
     }
-
-    private static class Link {
-        private static final int INIT_POINTERS_SIZE = 8;
-
-        long[] pointers;
-        int size;
-
-        Link() {
-            pointers = new long[INIT_POINTERS_SIZE];
-            size = 0;
-        }
-
-        void add(long pointer) {
-            if (size >= pointers.length) {
-                pointers = Arrays.copyOf(pointers, pointers.length * 2);
+
+    private void appendToResult(int probeSidetIx, int buildSidetIx, IFrameWriter writer) throws HyracksDataException {
+        if (!reverseOutputOrder) {
+            if (!appender.appendConcat(accessorProbe, probeSidetIx, accessorBuild, buildSidetIx)) {
+                flushFrame(outBuffer, writer);
+                appender.reset(outBuffer, true);
+                if (!appender.appendConcat(accessorProbe, probeSidetIx, accessorBuild, buildSidetIx)) {
+                    throw new IllegalStateException();
+                }
             }
-            pointers[size++] = pointer;
-        }
+        } else {
+            if (!appender.appendConcat(accessorBuild, buildSidetIx, accessorProbe, probeSidetIx)) {
+                flushFrame(outBuffer, writer);
+                appender.reset(outBuffer, true);
+                if (!appender.appendConcat(accessorBuild, buildSidetIx, accessorProbe, probeSidetIx)) {
+                    throw new IllegalStateException();
+                }
+            }
+        }
     }
 }
\ No newline at end of file
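
reverseOutputOrder decouples the joiner's physical build/probe roles from the output field order: under role reversal the tuples are concatenated as (build, probe) instead of (probe, build), so downstream operators see the same column order either way. A toy illustration of the ordering decision (plain arrays, not the frame-based API):

import java.util.Arrays;

// Toy version of appendToResult's ordering switch.
public final class ConcatOrderSketch {
    static Object[] concat(Object[] probe, Object[] build, boolean reverse) {
        Object[] first = reverse ? build : probe;
        Object[] second = reverse ? probe : build;
        Object[] out = Arrays.copyOf(first, first.length + second.length);
        System.arraycopy(second, 0, out, first.length, second.length);
        return out;
    }

    public static void main(String[] args) {
        Object[] probe = { "p1", "p2" };
        Object[] build = { "b1" };
        System.out.println(Arrays.toString(concat(probe, build, false))); // [p1, p2, b1]
        System.out.println(Arrays.toString(concat(probe, build, true)));  // [b1, p1, p2]
    }
}
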
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoin.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoin.java
index 7e84229..6870e71 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoin.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoin.java
@@ -14,15 +14,18 @@
  */
 package edu.uci.ics.hyracks.dataflow.std.join;
 
+import java.io.DataOutput;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 
 import edu.uci.ics.hyracks.api.comm.IFrameWriter;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
 import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
 import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
@@ -42,9 +45,12 @@
     private RunFileReader runFileReader;
     private int currentMemSize = 0;
     private final RunFileWriter runFileWriter;
+    private final boolean isLeftOuter;
+    private final ArrayTupleBuilder nullTupleBuilder;
 
     public NestedLoopJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessor0, FrameTupleAccessor accessor1,
-            ITuplePairComparator comparators, int memSize) throws HyracksDataException {
+            ITuplePairComparator comparators, int memSize, boolean isLeftOuter, INullWriter[] nullWriters1)
+            throws HyracksDataException {
         this.accessorInner = accessor1;
         this.accessorOuter = accessor0;
         this.appender = new FrameTupleAppender(ctx.getFrameSize());
@@ -56,6 +62,19 @@
         this.memSize = memSize;
         this.ctx = ctx;
 
+        this.isLeftOuter = isLeftOuter;
+        if (isLeftOuter) {
+            int innerFieldCount = accessorInner.getFieldCount();
+            nullTupleBuilder = new ArrayTupleBuilder(innerFieldCount);
+            DataOutput out = nullTupleBuilder.getDataOutput();
+            for (int i = 0; i < innerFieldCount; i++) {
+                nullWriters1[i].writeNull(out);
+                nullTupleBuilder.addFieldEndOffset();
+            }
+        } else {
+            nullTupleBuilder = null;
+        }
+
         FileReference file = ctx.getJobletContext().createManagedWorkspaceFile(
                 this.getClass().getSimpleName() + this.toString());
         runFileWriter = new RunFileWriter(file, ctx.getIOManager());
@@ -108,9 +127,11 @@
         int tupleCount1 = accessorInner.getTupleCount();
 
         for (int i = 0; i < tupleCount0; ++i) {
+            boolean matchFound = false;
             for (int j = 0; j < tupleCount1; ++j) {
                 int c = compare(accessorOuter, i, accessorInner, j);
                 if (c == 0) {
+                    matchFound = true;
                     if (!appender.appendConcat(accessorOuter, i, accessorInner, j)) {
                         flushFrame(outBuffer, writer);
                         appender.reset(outBuffer, true);
@@ -120,6 +141,18 @@
                     }
                 }
             }
+
+            if (!matchFound && isLeftOuter) {
+                if (!appender.appendConcat(accessorOuter, i, nullTupleBuilder.getFieldEndOffsets(),
+                        nullTupleBuilder.getByteArray(), 0, nullTupleBuilder.getSize())) {
+                    flushFrame(outBuffer, writer);
+                    appender.reset(outBuffer, true);
+                    if (!appender.appendConcat(accessorOuter, i, nullTupleBuilder.getFieldEndOffsets(),
+                            nullTupleBuilder.getByteArray(), 0, nullTupleBuilder.getSize())) {
+                        throw new IllegalStateException();
+                    }
+                }
+            }
         }
     }
 
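
With the matchFound flag, the nested-loop join can pad each unmatched outer tuple with the pre-built null tuple, which is what gives it left-outer semantics. A self-contained sketch of that control flow (toy string tuples instead of frames):

import java.util.ArrayList;
import java.util.List;

// Sketch of the left-outer control flow above: an outer tuple with no inner
// match is emitted exactly once, padded with a null inner side.
public final class LeftOuterNljSketch {
    public static List<String> join(int[] outer, int[] inner) {
        List<String> out = new ArrayList<>();
        for (int o : outer) {
            boolean matchFound = false;
            for (int i : inner) {
                if (o == i) {
                    matchFound = true;
                    out.add(o + "|" + i);
                }
            }
            if (!matchFound) {
                out.add(o + "|null"); // the null-tuple padding
            }
        }
        return out;
    }

    public static void main(String[] args) {
        System.out.println(join(new int[] { 1, 2, 3 }, new int[] { 2 }));
        // [1|null, 2|2, 3|null]
    }
}
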
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
index a699703..0be01c1 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
@@ -25,6 +25,8 @@
 import edu.uci.ics.hyracks.api.dataflow.IActivityGraphBuilder;
 import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
 import edu.uci.ics.hyracks.api.dataflow.TaskId;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
 import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
@@ -47,13 +49,18 @@
     private static final long serialVersionUID = 1L;
     private final ITuplePairComparatorFactory comparatorFactory;
     private final int memSize;
+    private final boolean isLeftOuter;
+    private final INullWriterFactory[] nullWriterFactories1;
 
     public NestedLoopJoinOperatorDescriptor(IOperatorDescriptorRegistry spec,
-            ITuplePairComparatorFactory comparatorFactory, RecordDescriptor recordDescriptor, int memSize) {
+            ITuplePairComparatorFactory comparatorFactory, RecordDescriptor recordDescriptor, int memSize,
+            boolean isLeftOuter, INullWriterFactory[] nullWriterFactories1) {
         super(spec, 2, 1);
         this.comparatorFactory = comparatorFactory;
         this.recordDescriptors[0] = recordDescriptor;
         this.memSize = memSize;
+        this.isLeftOuter = isLeftOuter;
+        this.nullWriterFactories1 = nullWriterFactories1;
     }
 
     @Override
@@ -111,6 +118,13 @@
             final RecordDescriptor rd1 = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
             final ITuplePairComparator comparator = comparatorFactory.createTuplePairComparator(ctx);
 
+            final INullWriter[] nullWriters1 = isLeftOuter ? new INullWriter[nullWriterFactories1.length] : null;
+            if (isLeftOuter) {
+                for (int i = 0; i < nullWriterFactories1.length; i++) {
+                    nullWriters1[i] = nullWriterFactories1[i].createNullWriter();
+                }
+            }
+
             IOperatorNodePushable op = new AbstractUnaryInputSinkOperatorNodePushable() {
                 private JoinCacheTaskState state;
 
@@ -118,8 +132,11 @@
                 public void open() throws HyracksDataException {
                     state = new JoinCacheTaskState(ctx.getJobletContext().getJobId(), new TaskId(getActivityId(),
                             partition));
+
                     state.joiner = new NestedLoopJoin(ctx, new FrameTupleAccessor(ctx.getFrameSize(), rd0),
-                            new FrameTupleAccessor(ctx.getFrameSize(), rd1), comparator, memSize);
+                            new FrameTupleAccessor(ctx.getFrameSize(), rd1), comparator, memSize, isLeftOuter,
+                            nullWriters1);
+
                 }
 
                 @Override
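
The descriptor ships serializable INullWriterFactory instances with the job, and each task materializes one INullWriter per inner-side field, as the loop above does. A minimal sketch of that factory/instance split (hypothetical interfaces mirroring the pattern, not the Hyracks API):

import java.io.Serializable;

// Hypothetical mirror of the factory/instance split: factories travel with
// the serialized job, writers are created per task at runtime.
interface SketchNullWriter { void writeNull(StringBuilder out); }
interface SketchNullWriterFactory extends Serializable { SketchNullWriter createNullWriter(); }

public final class NullWriterSketch {
    public static void main(String[] args) {
        SketchNullWriterFactory[] factories = {
            () -> out -> out.append("NULL"),
            () -> out -> out.append("NULL")
        };
        SketchNullWriter[] writers = new SketchNullWriter[factories.length];
        for (int i = 0; i < factories.length; i++) {
            writers[i] = factories[i].createNullWriter();
        }
        StringBuilder sb = new StringBuilder();
        writers[0].writeNull(sb);
        System.out.println(sb); // NULL
    }
}
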
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java
index 2905574..6e2b16a 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java
@@ -81,6 +81,8 @@
 
     private int[] buildPSizeInFrames; //Used for partition tuning
     private int freeFramesCounter; //Used for partition tuning
+
+    private boolean isTableEmpty; //Added to handle the case where the build side is empty (tableSize is 0)
 
     public OptimizedHybridHashJoin(IHyracksTaskContext ctx, int memForJoin, int numOfPartitions, String rel0Name,
             String rel1Name, int[] keys0, int[] keys1, IBinaryComparator[] comparators, RecordDescriptor buildRd,
@@ -89,10 +91,10 @@
         this.memForJoin = memForJoin;
         this.buildRd = buildRd;
         this.probeRd = probeRd;
-        this.buildHpc = probeHpc;
-        this.probeHpc = buildHpc;
-        this.buildKeys = keys0;
-        this.probeKeys = keys1;
+        this.buildHpc = buildHpc; 	
+        this.probeHpc = probeHpc; 	
+        this.buildKeys = keys1; 	
+        this.probeKeys = keys0;		
         this.comparators = comparators;
         this.rel0Name = rel0Name;
         this.rel1Name = rel1Name;
@@ -117,10 +119,10 @@
         this.memForJoin = memForJoin;
         this.buildRd = buildRd;
         this.probeRd = probeRd;
-        this.buildHpc = probeHpc;
-        this.probeHpc = buildHpc;
-        this.buildKeys = keys0;
-        this.probeKeys = keys1;
+        this.buildHpc = buildHpc; 	
+        this.probeHpc = probeHpc; 	
+        this.buildKeys = keys1; 	
+        this.probeKeys = keys0;		
         this.comparators = comparators;
         this.rel0Name = rel0Name;
         this.rel1Name = rel1Name;
@@ -171,6 +173,12 @@
     public void build(ByteBuffer buffer) throws HyracksDataException {
         accessorBuild.reset(buffer);
         int tupleCount = accessorBuild.getTupleCount();
+
+        boolean print = false;
+        if (print) {
+            accessorBuild.prettyPrint();
+        }
+
         for (int i = 0; i < tupleCount; ++i) {
             int pid = buildHpc.partition(accessorBuild, i, numOfPartitions);
             processTuple(i, pid);
@@ -338,6 +346,7 @@
 
         createInMemoryJoiner(inMemTupCount);
         cacheInMemJoin();
+        this.isTableEmpty = (inMemTupCount == 0);
     }
 
     private void partitionTune() throws HyracksDataException {
@@ -457,10 +466,14 @@
     }
 
     public void probe(ByteBuffer buffer, IFrameWriter writer) throws HyracksDataException {
-
         accessorProbe.reset(buffer);
         int tupleCount = accessorProbe.getTupleCount();
 
+        boolean print = false;
+        if (print) {
+            accessorProbe.prettyPrint();
+        }
+
         if (numOfSpilledParts == 0) {
             inMemJoiner.join(buffer, writer);
             return;
@@ -604,4 +617,8 @@
                 + freeFramesCounter;
         return s;
     }
+
+    public boolean isTableEmpty() {
+        return this.isTableEmpty;
+    }
 }
\ No newline at end of file
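
The constructor fix above matters for correctness: previously the build side was partitioned with the probe side's computer and key columns (and vice versa), so matching tuples could be routed to different partitions. Each side must hash its own key columns with the same hash family. A standalone illustration (toy tuples, not project code):

// Toy illustration: build tuples carry the join key in column 1, probe
// tuples in column 0; hashing the wrong column breaks co-partitioning.
public final class KeySwapSketch {
    static int partition(int key, int nParts) {
        int h = Integer.hashCode(key);
        return (h < 0 ? -(h + 1) : h) % nParts;
    }

    public static void main(String[] args) {
        int nParts = 8;
        int[] buildTuple = { 7, 42 };  // buildKeys = {1}
        int[] probeTuple = { 42, 99 }; // probeKeys = {0}

        // Correct wiring: each side hashes its own key column.
        System.out.println(partition(buildTuple[1], nParts) == partition(probeTuple[0], nParts)); // true

        // Swapped wiring (the old bug): sides hash each other's key columns.
        System.out.println(partition(buildTuple[0], nParts) == partition(probeTuple[1], nParts)); // false here
    }
}
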
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
index 3a7ee2c..cf39416 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
@@ -31,6 +31,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
 import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputer;
@@ -135,6 +136,7 @@
         recordDescriptors[0] = recordDescriptor;
         this.isLeftOuter = isLeftOuter;
         this.nullWriterFactories1 = nullWriterFactories1;
+        
 
     }
 
@@ -167,10 +169,10 @@
         ProbeAndJoinActivityNode phase2 = new ProbeAndJoinActivityNode(probeAid, buildAid);
 
         builder.addActivity(this, phase1);
-        builder.addSourceEdge(0, phase1, 0);
+        builder.addSourceEdge(1, phase1, 0);
 
         builder.addActivity(this, phase2);
-        builder.addSourceEdge(1, phase2, 0);
+        builder.addSourceEdge(0, phase2, 0);
 
         builder.addBlockingEdge(phase1, phase2);
 
@@ -253,14 +255,8 @@
             for (int i = 0; i < comparatorFactories.length; i++) {
                 comparators[i] = comparatorFactories[i].createBinaryComparator();
             }
-
-            final INullWriter[] nullWriters1 = isLeftOuter ? new INullWriter[nullWriterFactories1.length] : null;
-            if (isLeftOuter) {
-                for (int i = 0; i < nullWriterFactories1.length; i++) {
-                    nullWriters1[i] = nullWriterFactories1[i].createNullWriter();
-                }
-            }
-
+            
+            
             IOperatorNodePushable op = new AbstractUnaryInputSinkOperatorNodePushable() {
                 private BuildAndPartitionTaskState state = new BuildAndPartitionTaskState(ctx.getJobletContext()
                         .getJobId(), new TaskId(getActivityId(), partition));
@@ -278,9 +274,17 @@
                     state.memForJoin = memsize - 2;
                     state.numOfPartitions = getNumberOfPartitions(state.memForJoin, inputsize0, fudgeFactor,
                             nPartitions);
-                    state.hybridHJ = new OptimizedHybridHashJoin(ctx, state.memForJoin, state.numOfPartitions,
-                            PROBE_REL, BUILD_REL, probeKeys, buildKeys, comparators, probeRd, buildRd, probeHpc,
-                            buildHpc);
+                    if (!isLeftOuter) {
+                        state.hybridHJ = new OptimizedHybridHashJoin(ctx, state.memForJoin, state.numOfPartitions,
+                                PROBE_REL, BUILD_REL, probeKeys, buildKeys, comparators, probeRd, buildRd, probeHpc,
+                                buildHpc);
+                    } else {
+                        state.hybridHJ = new OptimizedHybridHashJoin(ctx, state.memForJoin, state.numOfPartitions,
+                                PROBE_REL, BUILD_REL, probeKeys, buildKeys, comparators, probeRd, buildRd, probeHpc,
+                                buildHpc, isLeftOuter, nullWriterFactories1);
+                    }
+
                     state.hybridHJ.initBuild();
                 }
 
@@ -368,7 +372,9 @@
 
                 @Override
                 public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-                    state.hybridHJ.probe(buffer, writer);
+                    if (!state.hybridHJ.isTableEmpty()) {
+                        state.hybridHJ.probe(buffer, writer);
+                    }
                 }
 
                 @Override
@@ -418,34 +424,40 @@
                     //Apply in-Mem HJ if possible
                     if ((buildPartSize < state.memForJoin) || (probePartSize < state.memForJoin)) {
                         int tabSize = -1;
-                        if (buildPartSize < probePartSize) {
+                        
+                        if (isLeftOuter || buildPartSize < probePartSize) {
                             tabSize = ohhj.getBuildPartitionSizeInTup(pid);
+                           
                             if (tabSize == 0) {
                                 throw new HyracksDataException(
                                         "Trying to join an empty partition. Invalid table size for inMemoryHashJoin.");
                             }
-                            //Build Side is smaller
-                            applyInMemHashJoin(probeKeys, buildKeys, tabSize, probeRd, buildRd, hpcRep1, hpcRep0,
-                                    buildSideReader, probeSideReader);
+                            //Build Side is smaller
+                            applyInMemHashJoin(buildKeys, probeKeys, tabSize, probeRd, buildRd, hpcRep0, hpcRep1,
+                                    buildSideReader, probeSideReader, false, pid);
 
-                        } else { //Role Reversal
+                        } else { //Role Reversal
                             tabSize = ohhj.getProbePartitionSizeInTup(pid);
                             if (tabSize == 0) {
                                 throw new HyracksDataException(
                                         "Trying to join an empty partition. Invalid table size for inMemoryHashJoin.");
                             }
                             //Probe Side is smaller
-                            applyInMemHashJoin(buildKeys, probeKeys, tabSize, buildRd, probeRd, hpcRep0, hpcRep1,
-                                    probeSideReader, buildSideReader);
+                            
+                            applyInMemHashJoin(probeKeys, buildKeys, tabSize, buildRd, probeRd, hpcRep1, hpcRep0,
+                                    probeSideReader, buildSideReader, true, pid);
                         }
                     }
                     //Apply (Recursive) HHJ
                     else {
                         OptimizedHybridHashJoin rHHj;
-                        if (buildPartSize < probePartSize) { //Build Side is smaller
+                        if (isLeftOuter || buildPartSize < probePartSize) { //Build Side is smaller
 
                             int n = getNumberOfPartitions(state.memForJoin, (int) buildPartSize, fudgeFactor,
                                     nPartitions);
+                           
                             rHHj = new OptimizedHybridHashJoin(ctx, state.memForJoin, n, PROBE_REL, BUILD_REL,
                                     probeKeys, buildKeys, comparators, probeRd, buildRd, probeHpc, buildHpc);
 
@@ -488,14 +500,14 @@
                                 for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
                                     RunFileReader rbrfw = rHHj.getBuildRFReader(rPid);
                                     RunFileReader rprfw = rHHj.getProbeRFReader(rPid);
-
+                                    
                                     if (rbrfw == null || rprfw == null) {
                                         continue;
                                     }
 
                                     int buildSideInTups = rHHj.getBuildPartitionSizeInTup(rPid);
                                     int probeSideInTups = rHHj.getProbePartitionSizeInTup(rPid);
-                                    if (buildSideInTups < probeSideInTups) {
+                                    if (isLeftOuter || buildSideInTups < probeSideInTups) {
                                         applyNestedLoopJoin(probeRd, buildRd, state.memForJoin, rbrfw, rprfw,
                                                 nljComparator0);
                                     } else {
@@ -507,6 +519,7 @@
                         } else { //Role Reversal (Probe Side is smaller)
                             int n = getNumberOfPartitions(state.memForJoin, (int) probePartSize, fudgeFactor,
                                     nPartitions);
+                            
                             rHHj = new OptimizedHybridHashJoin(ctx, state.memForJoin, n, BUILD_REL, PROBE_REL,
                                     buildKeys, probeKeys, comparators, buildRd, probeRd, buildHpc, probeHpc);
 
@@ -545,7 +558,7 @@
                                 for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
                                     RunFileReader rbrfw = rHHj.getBuildRFReader(rPid);
                                     RunFileReader rprfw = rHHj.getProbeRFReader(rPid);
-
+                                    
                                     if (rbrfw == null || rprfw == null) {
                                         continue;
                                     }
@@ -569,14 +582,14 @@
 
                 private void applyInMemHashJoin(int[] bKeys, int[] pKeys, int tabSize, RecordDescriptor buildRDesc,
                         RecordDescriptor probeRDesc, ITuplePartitionComputer hpcRepLarger,
-                        ITuplePartitionComputer hpcRepSmaller, RunFileReader bReader, RunFileReader pReader)
+                        ITuplePartitionComputer hpcRepSmaller, RunFileReader bReader, RunFileReader pReader,
+                        boolean reverse, int pid)
                         throws HyracksDataException {
 
                     ISerializableTable table = new SerializableHashTable(tabSize, ctx);
                     InMemoryHashJoin joiner = new InMemoryHashJoin(ctx, tabSize, new FrameTupleAccessor(
                             ctx.getFrameSize(), probeRDesc), hpcRepLarger, new FrameTupleAccessor(ctx.getFrameSize(),
                             buildRDesc), hpcRepSmaller, new FrameTuplePairComparator(pKeys, bKeys, comparators),
-                            isLeftOuter, nullWriters1, table);
+                            isLeftOuter, nullWriters1, table, reverse);
 
                     bReader.open();
                     rPartbuff.clear();
@@ -604,7 +617,7 @@
                         throws HyracksDataException {
 
                     NestedLoopJoin nlj = new NestedLoopJoin(ctx, new FrameTupleAccessor(ctx.getFrameSize(), outerRd),
-                            new FrameTupleAccessor(ctx.getFrameSize(), innerRd), nljComparator, memorySize);
+                            new FrameTupleAccessor(ctx.getFrameSize(), innerRd), nljComparator, memorySize, false, null);
 
                     ByteBuffer cacheBuff = ctx.allocateFrame();
                     innerReader.open();
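
A left outer join pins the build role to the inner input, because unmatched outer tuples have to be probed against the inner table to get their null padding; role reversal is therefore considered only for inner joins. A hedged condensation of the side-selection rule used above (not project code):

// Condensed form of the checks above: reverse roles only for inner joins,
// and only when the probe-side partition is not larger than the build side.
public final class SideSelectionSketch {
    static boolean useRoleReversal(boolean isLeftOuter, long buildPartSize, long probePartSize) {
        return !isLeftOuter && buildPartSize >= probePartSize;
    }

    public static void main(String[] args) {
        System.out.println(useRoleReversal(false, 100, 10)); // true: inner join, smaller probe side
        System.out.println(useRoleReversal(true, 100, 10));  // false: left outer join never reverses
    }
}
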
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/result/ResultWriterOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/result/ResultWriterOperatorDescriptor.java
new file mode 100644
index 0000000..edca60a
--- /dev/null
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/result/ResultWriterOperatorDescriptor.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.dataflow.std.result;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.IResultSerializer;
+import edu.uci.ics.hyracks.api.dataflow.value.IResultSerializerFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameOutputStream;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+
+public class ResultWriterOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
+
+    private final ResultSetId rsId;
+
+    private final boolean ordered;
+
+    private final IResultSerializerFactory resultSerializerFactory;
+
+    public ResultWriterOperatorDescriptor(IOperatorDescriptorRegistry spec, ResultSetId rsId, boolean ordered,
+            IResultSerializerFactory resultSerializerFactory) throws IOException {
+        super(spec, 1, 0);
+        this.rsId = rsId;
+        this.ordered = ordered;
+        this.resultSerializerFactory = resultSerializerFactory;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions) {
+        final IDatasetPartitionManager dpm = ctx.getDatasetPartitionManager();
+
+        final ByteBuffer outputBuffer = ctx.allocateFrame();
+
+        final FrameOutputStream frameOutputStream = new FrameOutputStream(ctx.getFrameSize());
+        frameOutputStream.reset(outputBuffer, true);
+        PrintStream printStream = new PrintStream(frameOutputStream);
+
+        final RecordDescriptor outRecordDesc = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
+        final IResultSerializer resultSerializer = resultSerializerFactory.createResultSerializer(outRecordDesc,
+                printStream);
+
+        final FrameTupleAccessor frameTupleAccessor = new FrameTupleAccessor(ctx.getFrameSize(), outRecordDesc);
+
+        return new AbstractUnaryInputSinkOperatorNodePushable() {
+            IFrameWriter datasetPartitionWriter;
+
+            @Override
+            public void open() throws HyracksDataException {
+                try {
+                    datasetPartitionWriter = dpm.createDatasetPartitionWriter(ctx, rsId, ordered, partition,
+                            nPartitions);
+                    datasetPartitionWriter.open();
+                    resultSerializer.init();
+                } catch (HyracksException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            @Override
+            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+                frameTupleAccessor.reset(buffer);
+                for (int tIndex = 0; tIndex < frameTupleAccessor.getTupleCount(); tIndex++) {
+                    resultSerializer.appendTuple(frameTupleAccessor, tIndex);
+                    if (!frameOutputStream.appendTuple()) {
+                        datasetPartitionWriter.nextFrame(outputBuffer);
+                        frameOutputStream.reset(outputBuffer, true);
+
+                        /* TODO(madhusudancs): This works under the assumption that no single serialized record is
+                         * longer than the buffer size.
+                         */
+                        resultSerializer.appendTuple(frameTupleAccessor, tIndex);
+                        frameOutputStream.appendTuple();
+                    }
+                }
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+                datasetPartitionWriter.fail();
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+                if (frameOutputStream.getTupleCount() > 0) {
+                    datasetPartitionWriter.nextFrame(outputBuffer);
+                    frameOutputStream.reset(outputBuffer, true);
+                }
+                datasetPartitionWriter.close();
+            }
+        };
+    }
+}
\ No newline at end of file
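
Typical wiring for this operator, mirroring the integration-test changes later in this commit (spec, NC1_ID, and ResultSerializerFactoryProvider come from the surrounding test fixture, so this is a usage sketch rather than a self-contained program):

// Register a result set on the job and write to it instead of a plain file.
ResultSetId rsId = new ResultSetId(1);
spec.addResultSetId(rsId);

IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
        ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
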
diff --git a/hyracks/hyracks-dist/pom.xml b/hyracks/hyracks-dist/pom.xml
new file mode 100755
index 0000000..58a4b1c
--- /dev/null
+++ b/hyracks/hyracks-dist/pom.xml
@@ -0,0 +1,74 @@
+<?xml version="1.0"?>
+<project
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+	xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<artifactId>hyracks</artifactId>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+	<artifactId>hyracks-dist</artifactId>
+	<name>hyracks-dist</name>
+
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+	</properties>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-resources-plugin</artifactId>
+				<version>2.5</version>
+				<executions>
+					<execution>
+						<id>copy-scripts</id>
+						<!-- here the phase you need -->
+						<phase>package</phase>
+						<goals>
+							<goal>copy-resources</goal>
+						</goals>
+						<configuration>
+							<outputDirectory>target/appassembler/</outputDirectory>
+							<resources>
+								<resource>
+									<directory>src/main/resources</directory>
+								</resource>
+							</resources>
+							<directoryMode>0755</directoryMode>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-antrun-plugin</artifactId>
+				<version>1.6</version>
+				<executions>
+					<execution>
+						<id>process-test-classes</id>
+						<phase>package</phase>
+						<configuration>
+							<target>
+								<chmod file="target/appassembler/bin/*" perm="755" />
+							</target>
+						</configuration>
+						<goals>
+							<goal>run</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+</project>
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/getip.sh b/hyracks/hyracks-dist/src/main/resources/bin/getip.sh
new file mode 100755
index 0000000..a691c0f
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/getip.sh
@@ -0,0 +1,20 @@
+#get the OS
+OS_NAME=`uname -a|awk '{print $1}'`
+LINUX_OS='Linux'
+
+if [ $OS_NAME = $LINUX_OS ]; then
+    #Get the IP address
+    IPADDR=`/sbin/ifconfig eth0 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+    if [ "$IPADDR" = "" ]; then
+        IPADDR=`/sbin/ifconfig em1 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+    fi
+    if [ "$IPADDR" = "" ]; then
+        IPADDR=`/sbin/ifconfig lo | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+    fi
+else
+    IPADDR=`/sbin/ifconfig en1 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+    if [ "$IPADDR" = "" ]; then
+        IPADDR=`/sbin/ifconfig lo0 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+    fi
+fi
+echo $IPADDR
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/startAllNCs.sh b/hyracks/hyracks-dist/src/main/resources/bin/startAllNCs.sh
new file mode 100755
index 0000000..629bd90
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/startAllNCs.sh
@@ -0,0 +1,6 @@
+PREGELIX_PATH=`pwd`
+
+for i in `cat conf/slaves`
+do
+   ssh $i "cd ${PREGELIX_PATH}; bin/startnc.sh"
+done
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/startCluster.sh b/hyracks/hyracks-dist/src/main/resources/bin/startCluster.sh
new file mode 100755
index 0000000..a0c2063
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/startCluster.sh
@@ -0,0 +1,3 @@
+bin/startcc.sh
+sleep 5
+bin/startAllNCs.sh
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/startDebugNc.sh b/hyracks/hyracks-dist/src/main/resources/bin/startDebugNc.sh
new file mode 100755
index 0000000..fe6cf27
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/startDebugNc.sh
@@ -0,0 +1,50 @@
+hostname
+
+#Get the IP address of the cc
+CCHOST_NAME=`cat conf/master`
+CURRENT_PATH=`pwd`
+CCHOST=`ssh ${CCHOST_NAME} "cd ${CURRENT_PATH}; bin/getip.sh"`
+
+#Import cluster properties
+. conf/cluster.properties
+. conf/debugnc.properties
+
+#Clean up temp dir
+
+rm -rf $NCTMP_DIR2
+mkdir $NCTMP_DIR2
+
+#Clean up log dir
+rm -rf $NCLOGS_DIR2
+mkdir $NCLOGS_DIR2
+
+
+#Clean up I/O working dir
+io_dirs=$(echo $IO_DIRS2 | tr "," "\n")
+for io_dir in $io_dirs
+do
+	rm -rf $io_dir
+	mkdir $io_dir
+done
+
+#Set JAVA_HOME
+export JAVA_HOME=$JAVA_HOME
+
+#Get the IP address of this machine
+IPADDR=`bin/getip.sh`
+
+#Get node ID
+NODEID=`hostname | cut -d '.' -f 1`
+NODEID=${NODEID}2
+
+#Set JAVA_OPTS
+export JAVA_OPTS=$NCJAVA_OPTS2
+
+cd $HYRACKS_HOME
+HYRACKS_HOME=`pwd`
+
+#Enter the temp dir
+cd $NCTMP_DIR2
+
+#Launch hyracks nc
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR  -data-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS2}" &> $NCLOGS_DIR2/$NODEID.log &
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/startcc.sh b/hyracks/hyracks-dist/src/main/resources/bin/startcc.sh
new file mode 100755
index 0000000..484ecac
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/startcc.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+hostname
+
+#Import cluster properties
+. conf/cluster.properties
+
+#Get the IP address of the cc
+CCHOST_NAME=`cat conf/master`
+CCHOST=`bin/getip.sh`
+
+#Remove the temp dir
+rm -rf $CCTMP_DIR
+mkdir $CCTMP_DIR
+
+#Remove the logs dir
+rm -rf $CCLOGS_DIR
+mkdir $CCLOGS_DIR
+
+#Export JAVA_HOME and JAVA_OPTS
+export JAVA_HOME=$JAVA_HOME
+export JAVA_OPTS=$CCJAVA_OPTS
+
+#Launch hyracks cc script
+chmod -R 755 $HYRACKS_HOME
+if [ -f "conf/topology.xml"  ]; then
+#Launch hyracks cc script with topology
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 -cluster-topology "conf/topology.xml" &> $CCLOGS_DIR/cc.log &
+else
+#Launch hyracks cc script without topology
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 &> $CCLOGS_DIR/cc.log &
+fi
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/startnc.sh b/hyracks/hyracks-dist/src/main/resources/bin/startnc.sh
new file mode 100755
index 0000000..23a4c36
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/startnc.sh
@@ -0,0 +1,49 @@
+hostname
+
+MY_NAME=`hostname`
+#Get the IP address of the cc
+CCHOST_NAME=`cat conf/master`
+CURRENT_PATH=`pwd`
+CCHOST=`ssh ${CCHOST_NAME} "cd ${CURRENT_PATH}; bin/getip.sh"`
+
+#Import cluster properties
+. conf/cluster.properties
+
+#Clean up temp dir
+
+rm -rf $NCTMP_DIR
+mkdir $NCTMP_DIR
+
+#Clean up log dir
+rm -rf $NCLOGS_DIR
+mkdir $NCLOGS_DIR
+
+
+#Clean up I/O working dir
+io_dirs=$(echo $IO_DIRS | tr "," "\n")
+for io_dir in $io_dirs
+do
+	rm -rf $io_dir
+	mkdir $io_dir
+done
+
+#Set JAVA_HOME
+export JAVA_HOME=$JAVA_HOME
+
+IPADDR=`bin/getip.sh`
+#echo $IPADDR
+
+#Get node ID
+NODEID=`hostname | cut -d '.' -f 1`
+
+#Set JAVA_OPTS
+export JAVA_OPTS=$NCJAVA_OPTS
+
+cd $HYRACKS_HOME
+HYRACKS_HOME=`pwd`
+
+#Enter the temp dir
+cd $NCTMP_DIR
+
+#Launch hyracks nc
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR  -data-ip-address $IPADDR -result-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS}" &> $NCLOGS_DIR/$NODEID.log &
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/stopAllNCs.sh b/hyracks/hyracks-dist/src/main/resources/bin/stopAllNCs.sh
new file mode 100755
index 0000000..12367c1
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/stopAllNCs.sh
@@ -0,0 +1,6 @@
+PREGELIX_PATH=`pwd`
+
+for i in `cat conf/slaves`
+do
+   ssh $i "cd ${PREGELIX_PATH}; bin/stopnc.sh"
+done
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/stopCluster.sh b/hyracks/hyracks-dist/src/main/resources/bin/stopCluster.sh
new file mode 100755
index 0000000..4889934
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/stopCluster.sh
@@ -0,0 +1,3 @@
+bin/stopAllNCs.sh
+sleep 2
+bin/stopcc.sh
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/stopcc.sh b/hyracks/hyracks-dist/src/main/resources/bin/stopcc.sh
new file mode 100755
index 0000000..c2f525a
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/stopcc.sh
@@ -0,0 +1,10 @@
+hostname
+. conf/cluster.properties
+
+#Kill process
+PID=`ps -ef|grep ${USER}|grep java|grep hyracks|awk '{print $2}'`
+echo $PID
+kill -9 $PID
+
+#Clean up CC temp dir
+rm -rf $CCTMP_DIR/*
diff --git a/hyracks/hyracks-dist/src/main/resources/bin/stopnc.sh b/hyracks/hyracks-dist/src/main/resources/bin/stopnc.sh
new file mode 100755
index 0000000..35c4794
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/bin/stopnc.sh
@@ -0,0 +1,27 @@
+hostname
+. conf/cluster.properties
+
+#Kill process
+PID=`ps -ef|grep ${USER}|grep java|grep 'Dapp.name=hyracksnc'|awk '{print $2}'`
+
+if [ "$PID" == "" ]; then
+  PID=`ps -ef|grep ${USER}|grep java|grep 'hyracks'|awk '{print $2}'`
+fi
+
+if [ "$PID" == "" ]; then
+  USERID=`id | sed 's/^uid=//;s/(.*$//'`
+  PID=`ps -ef|grep ${USERID}|grep java|grep 'Dapp.name=hyracksnc'|awk '{print $2}'`
+fi
+
+echo $PID
+kill -9 $PID
+
+#Clean up I/O working dir
+io_dirs=$(echo $IO_DIRS | tr "," "\n")
+for io_dir in $io_dirs
+do
+	rm -rf $io_dir/*
+done
+
+#Clean up NC temp dir
+rm -rf $NCTMP_DIR/*
diff --git a/hyracks/hyracks-dist/src/main/resources/conf/cluster.properties b/hyracks/hyracks-dist/src/main/resources/conf/cluster.properties
new file mode 100755
index 0000000..3b382f7
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/conf/cluster.properties
@@ -0,0 +1,37 @@
+#The CC port for Hyracks clients
+CC_CLIENTPORT=3099
+
+#The CC port for Hyracks cluster management
+CC_CLUSTERPORT=1099
+
+#The directory of hyracks binaries
+HYRACKS_HOME=../../../
+
+#The tmp directory for cc to install jars
+CCTMP_DIR=/tmp/t1
+
+#The tmp directory for nc to install jars
+NCTMP_DIR=/tmp/t2
+
+#The directory to put cc logs
+CCLOGS_DIR=$CCTMP_DIR/logs
+
+#The directory to put nc logs
+NCLOGS_DIR=$NCTMP_DIR/logs
+
+#Comma separated I/O directories for the spilling of external sort
+IO_DIRS="/tmp/t3,/tmp/t4"
+
+#The JAVA_HOME
+JAVA_HOME=$JAVA_HOME
+
+#The frame size of the internal dataflow engine
+FRAME_SIZE=65536
+
+#CC JAVA_OPTS
+CCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7001,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+# Yourkit option: -agentpath:/grid/0/dev/vborkar/tools/yjp-10.0.4/bin/linux-x86-64/libyjpagent.so=port=20001"
+
+#NC JAVA_OPTS
+NCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7002,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+
diff --git a/hyracks/hyracks-dist/src/main/resources/conf/debugnc.properties b/hyracks/hyracks-dist/src/main/resources/conf/debugnc.properties
new file mode 100755
index 0000000..27afa26
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/conf/debugnc.properties
@@ -0,0 +1,12 @@
+#The tmp directory for nc to install jars
+NCTMP_DIR2=/tmp/t-1
+
+#The directory to put nc logs
+NCLOGS_DIR2=$NCTMP_DIR2/logs
+
+#Comma separated I/O directories for the spilling of external sort
+IO_DIRS2="/tmp/t-2,/tmp/t-3"
+
+#NC JAVA_OPTS
+NCJAVA_OPTS2="-Xdebug -Xrunjdwp:transport=dt_socket,address=7003,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+
diff --git a/hyracks/hyracks-dist/src/main/resources/conf/master b/hyracks/hyracks-dist/src/main/resources/conf/master
new file mode 100755
index 0000000..2fbb50c
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/conf/master
@@ -0,0 +1 @@
+localhost
diff --git a/hyracks/hyracks-dist/src/main/resources/conf/slaves b/hyracks/hyracks-dist/src/main/resources/conf/slaves
new file mode 100755
index 0000000..2fbb50c
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/conf/slaves
@@ -0,0 +1 @@
+localhost
diff --git a/hyracks/hyracks-dist/src/main/resources/conf/topology-template.xml b/hyracks/hyracks-dist/src/main/resources/conf/topology-template.xml
new file mode 100755
index 0000000..4710706
--- /dev/null
+++ b/hyracks/hyracks-dist/src/main/resources/conf/topology-template.xml
@@ -0,0 +1,7 @@
+<cluster-topology>
+    <network-switch name="Global">
+        <network-switch name="local">
+            <terminal name="127.0.0.1"/>
+        </network-switch>
+    </network-switch>
+</cluster-topology>
\ No newline at end of file
diff --git a/hyracks/hyracks-documentation/pom.xml b/hyracks/hyracks-documentation/pom.xml
index ed24adb..7aedd57 100644
--- a/hyracks/hyracks-documentation/pom.xml
+++ b/hyracks/hyracks-documentation/pom.xml
@@ -1,8 +1,6 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
   <artifactId>hyracks-documentation</artifactId>
-  <version>0.2.3-SNAPSHOT</version>
   <name>hyracks-documentation</name>
 
   <parent>
diff --git a/hyracks/hyracks-examples/btree-example/btreeapp/pom.xml b/hyracks/hyracks-examples/btree-example/btreeapp/pom.xml
index 6350054..792cedd 100644
--- a/hyracks/hyracks-examples/btree-example/btreeapp/pom.xml
+++ b/hyracks/hyracks-examples/btree-example/btreeapp/pom.xml
@@ -2,7 +2,6 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks.examples.btree</groupId>
   <artifactId>btreeapp</artifactId>
-  <version>0.2.3-SNAPSHOT</version>
   <name>btreeapp</name>
 
   <parent>
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml b/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
index 82919ca..f941a5b 100644
--- a/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
@@ -37,13 +37,15 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
         <executions>
           <execution>
             <configuration>
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml b/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
index 8e90606..eb651ce 100644
--- a/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
@@ -41,8 +41,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml
index 70c663d..4cf9745 100644
--- a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml
+++ b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml
@@ -2,7 +2,6 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks.examples.compat</groupId>
   <artifactId>hadoopcompatapp</artifactId>
-  <version>0.2.3-SNAPSHOT</version>
   <name>hadoopcompatapp</name>
 
   <parent>
@@ -147,8 +146,9 @@
       	<artifactId>maven-compiler-plugin</artifactId>
       	<version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
       <plugin>
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatclient/pom.xml b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatclient/pom.xml
index 2f0b00f..f52536c 100644
--- a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatclient/pom.xml
+++ b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatclient/pom.xml
@@ -2,7 +2,6 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks.examples.compat</groupId>
   <artifactId>hadoopcompatclient</artifactId>
-  <version>0.2.3-SNAPSHOT</version>
   <name>hadoopcompatclient</name>
 
   <parent>
@@ -33,13 +32,14 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
         </configuration>
       </plugin>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
         <executions>
           <execution>
             <configuration>
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompathelper/pom.xml b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompathelper/pom.xml
index 1e029be..c397a72 100644
--- a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompathelper/pom.xml
+++ b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompathelper/pom.xml
@@ -2,7 +2,6 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks.examples.compat</groupId>
   <artifactId>hadoopcompathelper</artifactId>
-  <version>0.2.3-SNAPSHOT</version>
   <name>hadoopcompathelper</name>
 
   <parent>
@@ -32,8 +31,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/pom.xml b/hyracks/hyracks-examples/hadoop-compat-example/pom.xml
index a2cf5ae..16787ca 100644
--- a/hyracks/hyracks-examples/hadoop-compat-example/pom.xml
+++ b/hyracks/hyracks-examples/hadoop-compat-example/pom.xml
@@ -2,7 +2,6 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks.examples</groupId>
   <artifactId>hadoop-compat-example</artifactId>
-  <version>0.2.3-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>hadoop-compat-example</name>
 
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml b/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
index e31af75..5e7b5c9 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
@@ -16,8 +16,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
@@ -31,6 +32,13 @@
   		<scope>test</scope>
   	</dependency>
   	<dependency>
+        <groupId>edu.uci.ics.hyracks</groupId>
+        <artifactId>hyracks-client</artifactId>
+        <version>0.2.3-SNAPSHOT</version>
+        <type>jar</type>
+        <scope>compile</scope>
+    </dependency>
+  	<dependency>
   		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>hyracks-dataflow-std</artifactId>
   		<version>0.2.3-SNAPSHOT</version>
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java
index 8482083..33ddca2 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java
@@ -30,6 +30,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -44,8 +45,8 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
@@ -60,6 +61,7 @@
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class BTreePrimaryIndexScanOperatorTest extends AbstractIntegrationTest {
     static {
@@ -114,7 +116,7 @@
         spec.addRoot(primaryCreateOp);
         runTest(spec);
     }
-    
+
     public void loadPrimaryIndexTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
@@ -143,8 +145,9 @@
 
         int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
         TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, fieldPermutation, 0.7f, dataflowHelperFactory,
+                NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
@@ -179,13 +182,16 @@
         int[] highKeyFields = null; // + infinity
 
         BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, lowKeyFields,
-                highKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, lowKeyFields, highKeyFields, true, true, dataflowHelperFactory, false,
+                NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
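
Note: the hunk above shows the output-path migration repeated throughout the rest of this diff: instead of writing to a temp file via PlainFileWriterOperatorDescriptor, each test registers a ResultSetId on the JobSpecification and routes output through ResultWriterOperatorDescriptor. Distilled to its essentials (a sketch; spec, NC1_ID, and all classes are the ones imported above):

    ResultSetId rsId = new ResultSetId(1);
    spec.addResultSetId(rsId);  // register the result set on the job spec
    IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId,
            true,  // boolean flag passed as true in every occurrence in this diff
            ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);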
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java
index 82fecbe..acd3027 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java
@@ -30,6 +30,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -44,8 +45,8 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
@@ -60,6 +61,7 @@
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class BTreePrimaryIndexSearchOperatorTest extends AbstractIntegrationTest {
     static {
@@ -114,7 +116,7 @@
         spec.addRoot(primaryCreateOp);
         runTest(spec);
     }
-    
+
     public void loadPrimaryIndexTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
@@ -143,8 +145,9 @@
 
         int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
         TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, fieldPermutation, 0.7f, dataflowHelperFactory,
+                NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
@@ -184,13 +187,16 @@
         int[] highKeyFields = { 1 };
 
         BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, lowKeyFields,
-                highKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, lowKeyFields, highKeyFields, true, true, dataflowHelperFactory, false,
+                NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java
index e63ce11..ca03b16 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java
@@ -29,6 +29,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -42,7 +43,7 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
@@ -57,6 +58,7 @@
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class BTreePrimaryIndexStatsOperatorTest extends AbstractIntegrationTest {
     static {
@@ -82,7 +84,6 @@
     private IFileSplitProvider primaryBtreeSplitProvider = new ConstantFileSplitProvider(
             new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
 
-
     @Before
     public void setup() throws Exception {
         // field, type and key declarations for primary index
@@ -107,7 +108,7 @@
         spec.addRoot(primaryCreateOp);
         runTest(spec);
     }
-    
+
     public void loadPrimaryIndexTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
@@ -136,8 +137,9 @@
 
         int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
         TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, fieldPermutation, 0.7f, dataflowHelperFactory,
+                NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
@@ -153,12 +155,15 @@
         JobSpecification spec = new JobSpecification();
 
         TreeIndexStatsOperatorDescriptor primaryStatsOp = new TreeIndexStatsOperatorDescriptor(spec, storageManager,
-                indexRegistryProvider, primaryBtreeSplitProvider,
-                primaryTypeTraits, primaryComparatorFactories, dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+                indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryStatsOp, NC1_ID);
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), primaryStatsOp, 0, printer, 0);
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java
index 3c87ae3..34a1cd3 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java
@@ -30,6 +30,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -44,9 +45,9 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
@@ -63,6 +64,7 @@
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class BTreeSecondaryIndexInsertOperatorTest extends AbstractIntegrationTest {
     static {
@@ -339,9 +341,11 @@
                 primaryHighKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryBtreeSearchOp, 0);
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java
index 1304f12..c9ee118 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java
@@ -30,6 +30,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -44,8 +45,8 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
@@ -60,6 +61,7 @@
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class BTreeSecondaryIndexSearchOperatorTest extends AbstractIntegrationTest {
     static {
@@ -137,7 +139,7 @@
         spec.addRoot(primaryCreateOp);
         runTest(spec);
     }
-    
+
     public void loadPrimaryIndexTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
@@ -166,8 +168,9 @@
 
         int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
         TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, fieldPermutation, 0.7f, dataflowHelperFactory,
+                NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
@@ -180,14 +183,14 @@
 
     public void createSecondaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
-        TreeIndexCreateOperatorDescriptor secondaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
-                indexRegistryProvider, secondaryBtreeSplitProvider, secondaryTypeTraits, secondaryComparatorFactories,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        TreeIndexCreateOperatorDescriptor secondaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, secondaryBtreeSplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryCreateOp, NC1_ID);
         spec.addRoot(secondaryCreateOp);
         runTest(spec);
     }
-    
+
     public void loadSecondaryIndexTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
@@ -212,8 +215,9 @@
 
         // scan primary index
         BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, lowKeyFields,
-                highKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, lowKeyFields, highKeyFields, true, true, dataflowHelperFactory, false,
+                NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
 
         // sort based on secondary keys
@@ -225,8 +229,9 @@
         // load secondary index
         int[] fieldPermutation = { 3, 0 };
         TreeIndexBulkLoadOperatorDescriptor secondaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, secondaryBtreeSplitProvider, secondaryTypeTraits, secondaryComparatorFactories, fieldPermutation, 0.7f,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, indexRegistryProvider, secondaryBtreeSplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, fieldPermutation, 0.7f, dataflowHelperFactory,
+                NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeBulkLoad, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
@@ -268,8 +273,8 @@
         // search secondary index
         BTreeSearchOperatorDescriptor secondaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec,
                 secondaryRecDesc, storageManager, indexRegistryProvider, secondaryBtreeSplitProvider,
-                secondaryTypeTraits, secondaryComparatorFactories, secondaryLowKeyFields, secondaryHighKeyFields, true, true,
-                dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+                secondaryTypeTraits, secondaryComparatorFactories, secondaryLowKeyFields, secondaryHighKeyFields, true,
+                true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeSearchOp, NC1_ID);
 
         int[] primaryLowKeyFields = { 1 }; // second field from the tuples
@@ -279,13 +284,16 @@
 
         // search primary index
         BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, primaryLowKeyFields,
-                primaryHighKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, primaryLowKeyFields, primaryHighKeyFields, true, true,
+                dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryBtreeSearchOp, 0);
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
index 023bdd9..8893567 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
@@ -14,29 +14,39 @@
  */
 package edu.uci.ics.hyracks.tests.integration;
 
+import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileReader;
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.commons.io.FileUtils;
 import org.junit.AfterClass;
+import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.rules.TemporaryFolder;
 
 import edu.uci.ics.hyracks.api.client.HyracksConnection;
 import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetReader;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.job.JobFlag;
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.client.dataset.HyracksDataset;
 import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
 import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
 import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
 import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ResultFrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
 
 public abstract class AbstractIntegrationTest {
     private static final Logger LOGGER = Logger.getLogger(AbstractIntegrationTest.class.getName());
@@ -80,6 +90,7 @@
         ncConfig1.ccPort = 39001;
         ncConfig1.clusterNetIPAddress = "127.0.0.1";
         ncConfig1.dataIPAddress = "127.0.0.1";
+        ncConfig1.datasetIPAddress = "127.0.0.1";
         ncConfig1.nodeId = NC1_ID;
         nc1 = new NodeControllerService(ncConfig1);
         nc1.start();
@@ -89,6 +100,7 @@
         ncConfig2.ccPort = 39001;
         ncConfig2.clusterNetIPAddress = "127.0.0.1";
         ncConfig2.dataIPAddress = "127.0.0.1";
+        ncConfig2.datasetIPAddress = "127.0.0.1";
         ncConfig2.nodeId = NC2_ID;
         nc2 = new NodeControllerService(ncConfig2);
         nc2.start();
@@ -107,7 +119,7 @@
         cc.stop();
     }
 
-    protected void runTest(JobSpecification spec) throws Exception {
+    protected JobId executeTest(JobSpecification spec) throws Exception {
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info(spec.toJSON().toString(2));
         }
@@ -115,25 +127,72 @@
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info(jobId.toString());
         }
-        hcc.waitForCompletion(jobId);
-        dumpOutputFiles();
+        return jobId;
     }
 
-    private void dumpOutputFiles() {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            for (File f : outputFiles) {
-                if (f.exists() && f.isFile()) {
-                    try {
-                        LOGGER.info("Reading file: " + f.getAbsolutePath() + " in test: " + getClass().getName());
-                        String data = FileUtils.readFileToString(f);
-                        LOGGER.info(data);
-                    } catch (IOException e) {
-                        LOGGER.info("Error reading file: " + f.getAbsolutePath());
-                        LOGGER.info(e.getMessage());
-                    }
+    protected void runTest(JobSpecification spec) throws Exception {
+        JobId jobId = executeTest(spec);
+        hcc.waitForCompletion(jobId);
+    }
+
+    protected List<String> readResults(JobSpecification spec, JobId jobId, ResultSetId resultSetId) throws Exception {
+        int nReaders = 1;
+        ByteBuffer resultBuffer = ByteBuffer.allocate(spec.getFrameSize());
+        resultBuffer.clear();
+
+        IFrameTupleAccessor frameTupleAccessor = new ResultFrameTupleAccessor(spec.getFrameSize());
+
+        IHyracksDataset hyracksDataset = new HyracksDataset(hcc, spec.getFrameSize(), nReaders);
+        IHyracksDatasetReader reader = hyracksDataset.createReader(jobId, resultSetId);
+
+        List<String> resultRecords = new ArrayList<String>();
+        ByteBufferInputStream bbis = new ByteBufferInputStream();
+
+        int readSize = reader.read(resultBuffer);
+
+        while (readSize > 0) {
+
+            try {
+                frameTupleAccessor.reset(resultBuffer);
+                for (int tIndex = 0; tIndex < frameTupleAccessor.getTupleCount(); tIndex++) {
+                    int start = frameTupleAccessor.getTupleStartOffset(tIndex);
+                    int length = frameTupleAccessor.getTupleEndOffset(tIndex) - start;
+                    bbis.setByteBuffer(resultBuffer, start);
+                    byte[] recordBytes = new byte[length];
+                    bbis.read(recordBytes, 0, length);
+                    resultRecords.add(new String(recordBytes, 0, length));
                 }
+            } finally {
+                bbis.close();
             }
+
+            resultBuffer.clear();
+            readSize = reader.read(resultBuffer);
         }
+        return resultRecords;
+    }
+
+    protected boolean runTestAndCompareResults(JobSpecification spec, String[] expectedFileNames) throws Exception {
+        JobId jobId = executeTest(spec);
+
+        List<String> results;
+        for (int i = 0; i < expectedFileNames.length; i++) {
+            results = readResults(spec, jobId, spec.getResultSetIds().get(i));
+            BufferedReader expectedFile = new BufferedReader(new FileReader(expectedFileNames[i]));
+
+            String expectedLine, actualLine;
+            int j = 0;
+            while ((expectedLine = expectedFile.readLine()) != null) {
+                actualLine = results.get(j).trim();
+                Assert.assertEquals(expectedLine, actualLine);
+                j++;
+            }
+            Assert.assertEquals(j, results.size());
+            expectedFile.close();
+        }
+
+        hcc.waitForCompletion(jobId);
+        return true;
     }
 
     protected File createTempFile() throws IOException {
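
Note: with executeTest, readResults, and runTestAndCompareResults in place, subclasses can verify job output without dumping temp files. A hypothetical usage sketch (buildJob and the expected-file path are stand-ins for illustration, not from this commit):

    JobSpecification spec = buildJob();  // stand-in for the job setup in the tests above
    runTestAndCompareResults(spec, new String[] { "data/expected/part-0.tsv" });  // hypothetical path

    // Or inspect the records directly:
    JobId jobId = executeTest(spec);
    List<String> records = readResults(spec, jobId, spec.getResultSetIds().get(0));
    hcc.waitForCompletion(jobId);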
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
index e0b8c73..24d0ef4 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
@@ -16,6 +16,7 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.List;
@@ -23,6 +24,7 @@
 import java.util.logging.Logger;
 
 import org.apache.commons.io.FileUtils;
+import org.json.JSONArray;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Rule;
@@ -30,13 +32,20 @@
 
 import edu.uci.ics.hyracks.api.client.HyracksConnection;
 import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetReader;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.api.job.JobFlag;
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.client.dataset.HyracksDataset;
 import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
 import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
 import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
 import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ResultFrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
 
 public abstract class AbstractMultiNCIntegrationTest {
 
@@ -84,6 +93,7 @@
             ncConfig.ccPort = 39001;
             ncConfig.clusterNetIPAddress = "127.0.0.1";
             ncConfig.dataIPAddress = "127.0.0.1";
+            ncConfig.datasetIPAddress = "127.0.0.1";
             ncConfig.nodeId = ASTERIX_IDS[i];
             asterixNCs[i] = new NodeControllerService(ncConfig);
             asterixNCs[i].start();
@@ -112,6 +122,46 @@
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info(jobId.toString());
         }
+
+        int nReaders = 1;
+
+        ByteBuffer resultBuffer = ByteBuffer.allocate(spec.getFrameSize());
+        resultBuffer.clear();
+
+        IFrameTupleAccessor frameTupleAccessor = new ResultFrameTupleAccessor(spec.getFrameSize());
+
+        IHyracksDataset hyracksDataset = new HyracksDataset(hcc, spec.getFrameSize(), nReaders);
+        IHyracksDatasetReader reader = hyracksDataset.createReader(jobId, spec.getResultSetIds().get(0));
+
+        JSONArray resultRecords = new JSONArray();
+        ByteBufferInputStream bbis = new ByteBufferInputStream();
+
+        int readSize = reader.read(resultBuffer);
+
+        while (readSize > 0) {
+
+            try {
+                frameTupleAccessor.reset(resultBuffer);
+                for (int tIndex = 0; tIndex < frameTupleAccessor.getTupleCount(); tIndex++) {
+                    int start = frameTupleAccessor.getTupleStartOffset(tIndex);
+                    int length = frameTupleAccessor.getTupleEndOffset(tIndex) - start;
+                    bbis.setByteBuffer(resultBuffer, start);
+                    byte[] recordBytes = new byte[length];
+                    bbis.read(recordBytes, 0, length);
+                    resultRecords.put(new String(recordBytes, 0, length));
+                }
+            } finally {
+                try {
+                    bbis.close();
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            resultBuffer.clear();
+            readSize = reader.read(resultBuffer);
+        }
+
         hcc.waitForCompletion(jobId);
         dumpOutputFiles();
     }
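
Note: the loop added here is the standard way to drain an IHyracksDatasetReader: read() fills a frame-sized buffer and returns the byte count, so iteration stops once the count is no longer positive. Skeleton of the pattern (a sketch using the reader, buffer, and accessor constructed above):

    int readSize = reader.read(resultBuffer);
    while (readSize > 0) {
        frameTupleAccessor.reset(resultBuffer);  // interpret the frame's tuples
        // ... decode each tuple via ByteBufferInputStream, as in the hunk above ...
        resultBuffer.clear();                    // reuse the buffer for the next frame
        readSize = reader.read(resultBuffer);
    }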
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AggregationTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AggregationTest.java
index 93e1e9b..06751e3 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AggregationTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AggregationTest.java
@@ -25,8 +25,8 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
@@ -49,7 +49,6 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
 import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.group.HashSpillableTableFactory;
 import edu.uci.ics.hyracks.dataflow.std.group.IFieldAggregateDescriptorFactory;
 import edu.uci.ics.hyracks.dataflow.std.group.aggregators.AvgFieldGroupAggregatorFactory;
@@ -62,60 +61,42 @@
 import edu.uci.ics.hyracks.dataflow.std.group.external.ExternalGroupOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.group.hash.HashGroupOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.group.preclustered.PreclusteredGroupOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 /**
  *
  */
 public class AggregationTest extends AbstractIntegrationTest {
 
-    final IFileSplitProvider splitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC2_ID, new FileReference(new File(
-                    "data/tpch0.001/lineitem.tbl"))) });
+    final IFileSplitProvider splitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC2_ID,
+            new FileReference(new File("data/tpch0.001/lineitem.tbl"))) });
 
-    final RecordDescriptor desc = new RecordDescriptor(
-            new ISerializerDeserializer[] {
-                    UTF8StringSerializerDeserializer.INSTANCE,
-                    IntegerSerializerDeserializer.INSTANCE,
-                    IntegerSerializerDeserializer.INSTANCE,
-                    IntegerSerializerDeserializer.INSTANCE,
-                    IntegerSerializerDeserializer.INSTANCE,
-                    FloatSerializerDeserializer.INSTANCE,
-                    FloatSerializerDeserializer.INSTANCE,
-                    FloatSerializerDeserializer.INSTANCE,
-                    UTF8StringSerializerDeserializer.INSTANCE,
-                    UTF8StringSerializerDeserializer.INSTANCE,
-                    UTF8StringSerializerDeserializer.INSTANCE,
-                    UTF8StringSerializerDeserializer.INSTANCE,
-                    UTF8StringSerializerDeserializer.INSTANCE,
-                    UTF8StringSerializerDeserializer.INSTANCE,
-                    UTF8StringSerializerDeserializer.INSTANCE,
-                    UTF8StringSerializerDeserializer.INSTANCE });
+    final RecordDescriptor desc = new RecordDescriptor(new ISerializerDeserializer[] {
+            UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+            IntegerSerializerDeserializer.INSTANCE, FloatSerializerDeserializer.INSTANCE,
+            FloatSerializerDeserializer.INSTANCE, FloatSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
 
-    final ITupleParserFactory tupleParserFactory = new DelimitedDataTupleParserFactory(
-            new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                    IntegerParserFactory.INSTANCE,
-                    IntegerParserFactory.INSTANCE,
-                    IntegerParserFactory.INSTANCE,
-                    IntegerParserFactory.INSTANCE, FloatParserFactory.INSTANCE,
-                    FloatParserFactory.INSTANCE, FloatParserFactory.INSTANCE,
-                    UTF8StringParserFactory.INSTANCE,
-                    UTF8StringParserFactory.INSTANCE,
-                    UTF8StringParserFactory.INSTANCE,
-                    UTF8StringParserFactory.INSTANCE,
-                    UTF8StringParserFactory.INSTANCE,
-                    UTF8StringParserFactory.INSTANCE,
-                    UTF8StringParserFactory.INSTANCE,
-                    UTF8StringParserFactory.INSTANCE, }, '|');
+    final ITupleParserFactory tupleParserFactory = new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
+            UTF8StringParserFactory.INSTANCE, IntegerParserFactory.INSTANCE, IntegerParserFactory.INSTANCE,
+            IntegerParserFactory.INSTANCE, IntegerParserFactory.INSTANCE, FloatParserFactory.INSTANCE,
+            FloatParserFactory.INSTANCE, FloatParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+            UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+            UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+            UTF8StringParserFactory.INSTANCE, }, '|');
 
-    private AbstractSingleActivityOperatorDescriptor getPrinter(
-            IOperatorDescriptorRegistry spec, String prefix) throws IOException {
+    private AbstractSingleActivityOperatorDescriptor getPrinter(JobSpecification spec, String prefix)
+            throws IOException {
 
-        AbstractSingleActivityOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(
-                spec, new ConstantFileSplitProvider(new FileSplit[] {
-                        new FileSplit(NC1_ID, createTempFile()
-                                .getAbsolutePath()),
-                        new FileSplit(NC2_ID, createTempFile()
-                                .getAbsolutePath()) }), "\t");
+        ResultSetId rsId = new ResultSetId(1);
+        AbstractSingleActivityOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
+        spec.addResultSetId(rsId);
 
         return printer;
     }
@@ -124,54 +105,38 @@
     public void singleKeySumInmemGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        FloatSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, FloatSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 0 };
         int tableSize = 8;
 
-        HashGroupOperatorDescriptor grouper = new HashGroupOperatorDescriptor(
-                spec,
-                keyFields,
-                new FieldHashPartitionComputerFactory(
-                        keyFields,
+        HashGroupOperatorDescriptor grouper = new HashGroupOperatorDescriptor(spec, keyFields,
+                new FieldHashPartitionComputerFactory(keyFields,
                         new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
                                 .of(UTF8StringPointable.FACTORY) }),
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
-                        .of(UTF8StringPointable.FACTORY) },
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, true),
-                                new IntSumFieldAggregatorFactory(3, true),
-                                new FloatSumFieldAggregatorFactory(5, true) }),
-                outputRec, tableSize);
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(1, true), new IntSumFieldAggregatorFactory(3, true),
+                        new FloatSumFieldAggregatorFactory(5, true) }), outputRec, tableSize);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
 
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec,
-                new FieldHashPartitionComputerFactory(
-                        keyFields,
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields,
                         new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
                                 .of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "singleKeySumInmemGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "singleKeySumInmemGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -184,49 +149,34 @@
     public void singleKeySumPreClusterGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        FloatSerializerDeserializer.INSTANCE});
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, FloatSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 0 };
 
-        PreclusteredGroupOperatorDescriptor grouper = new PreclusteredGroupOperatorDescriptor(
-                spec,
-                keyFields,
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
-                        .of(UTF8StringPointable.FACTORY) },
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, true),
-                                new IntSumFieldAggregatorFactory(3, true),
-                                new FloatSumFieldAggregatorFactory(5, true)}),
-                outputRec);
+        PreclusteredGroupOperatorDescriptor grouper = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(1, true), new IntSumFieldAggregatorFactory(3, true),
+                        new FloatSumFieldAggregatorFactory(5, true) }), outputRec);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
 
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec,
-                new FieldHashPartitionComputerFactory(
-                        keyFields,
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields,
                         new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
                                 .of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "singleKeySumInmemGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "singleKeySumInmemGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -239,64 +189,43 @@
     public void singleKeySumExtGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        FloatSerializerDeserializer.INSTANCE});
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, FloatSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 0 };
         int frameLimits = 4;
         int tableSize = 8;
 
-        ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(
-                spec,
-                keyFields,
-                frameLimits,
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
-                        .of(UTF8StringPointable.FACTORY) },
-                new UTF8StringNormalizedKeyComputerFactory(),
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, false),
+        ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, keyFields, frameLimits,
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new UTF8StringNormalizedKeyComputerFactory(), new MultiFieldsAggregatorFactory(
+                        new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
                                 new IntSumFieldAggregatorFactory(3, false),
-                                new FloatSumFieldAggregatorFactory(5, false)}),
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, false),
+                                new FloatSumFieldAggregatorFactory(5, false) }), new MultiFieldsAggregatorFactory(
+                        new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
                                 new IntSumFieldAggregatorFactory(2, false),
-                                new FloatSumFieldAggregatorFactory(3, false)}),
-                outputRec,
-                new HashSpillableTableFactory(
-                        new FieldHashPartitionComputerFactory(
-                                keyFields,
-                                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY) }),
-                        tableSize), true);
+                                new FloatSumFieldAggregatorFactory(3, false) }), outputRec,
+                new HashSpillableTableFactory(new FieldHashPartitionComputerFactory(keyFields,
+                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                .of(UTF8StringPointable.FACTORY) }), tableSize), true);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
 
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec,
-                new FieldHashPartitionComputerFactory(
-                        keyFields,
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields,
                         new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
                                 .of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "singleKeySumExtGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "singleKeySumExtGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -309,54 +238,38 @@
     public void singleKeyAvgInmemGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        FloatSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, FloatSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 0 };
         int tableSize = 8;
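+        // In-memory hash group-by: partitions rows into a tableSize-bucket hash
+        // table on the string key (field 0) and computes SUM(field 1), COUNT(*),
+        // and AVG(field 1) in a single pass.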
 
-        HashGroupOperatorDescriptor grouper = new HashGroupOperatorDescriptor(
-                spec,
-                keyFields,
-                new FieldHashPartitionComputerFactory(
-                        keyFields,
+        HashGroupOperatorDescriptor grouper = new HashGroupOperatorDescriptor(spec, keyFields,
+                new FieldHashPartitionComputerFactory(keyFields,
                         new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
                                 .of(UTF8StringPointable.FACTORY) }),
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
-                        .of(UTF8StringPointable.FACTORY) },
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, true),
-                                new CountFieldAggregatorFactory(true),
-                                new AvgFieldGroupAggregatorFactory(1, true) }),
-                outputRec, tableSize);
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(1, true), new CountFieldAggregatorFactory(true),
+                        new AvgFieldGroupAggregatorFactory(1, true) }), outputRec, tableSize);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
 
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec,
-                new FieldHashPartitionComputerFactory(
-                        keyFields,
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields,
                         new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
                                 .of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "singleKeyAvgInmemGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "singleKeyAvgInmemGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -369,49 +282,34 @@
     public void singleKeyAvgPreClusterGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        FloatSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, FloatSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 0 };
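+        // PreclusteredGroupOperatorDescriptor aggregates consecutive rows with
+        // equal keys, so it assumes its input is already clustered on field 0;
+        // the aggregates match the in-memory variant (sum, count, avg).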
 
-        PreclusteredGroupOperatorDescriptor grouper = new PreclusteredGroupOperatorDescriptor(
-                spec,
-                keyFields,
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
-                        .of(UTF8StringPointable.FACTORY) },
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, true),
-                                new CountFieldAggregatorFactory(true),
-                                new AvgFieldGroupAggregatorFactory(1, true) }),
-                outputRec);
+        PreclusteredGroupOperatorDescriptor grouper = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(1, true), new CountFieldAggregatorFactory(true),
+                        new AvgFieldGroupAggregatorFactory(1, true) }), outputRec);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
 
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec,
-                new FieldHashPartitionComputerFactory(
-                        keyFields,
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields,
                         new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
                                 .of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "singleKeyAvgInmemGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "singleKeyAvgPreClusterGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -424,64 +322,43 @@
     public void singleKeyAvgExtGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        FloatSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, FloatSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 0 };
         int frameLimits = 4;
         int tableSize = 8;
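+        // External group-by: the first MultiFieldsAggregatorFactory computes the
+        // partial sum/count/avg within the frameLimits memory budget, and the
+        // second merges spilled partials (AvgFieldMergeAggregatorFactory recombines
+        // the partial avg state at field 3 of the intermediate records).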
 
-        ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(
-                spec,
-                keyFields,
-                frameLimits,
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
-                        .of(UTF8StringPointable.FACTORY) },
+        ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, keyFields, frameLimits,
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 new UTF8StringNormalizedKeyComputerFactory(),
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, false),
-                                new CountFieldAggregatorFactory(false),
-                                new AvgFieldGroupAggregatorFactory(1, false) }),
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, false),
+                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(1, false), new CountFieldAggregatorFactory(false),
+                        new AvgFieldGroupAggregatorFactory(1, false) }), new MultiFieldsAggregatorFactory(
+                        new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
                                 new IntSumFieldAggregatorFactory(2, false),
-                                new AvgFieldMergeAggregatorFactory(3, false) }),
-                outputRec,
-                new HashSpillableTableFactory(
-                        new FieldHashPartitionComputerFactory(
-                                keyFields,
-                                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY) }),
-                        tableSize), true);
+                                new AvgFieldMergeAggregatorFactory(3, false) }), outputRec,
+                new HashSpillableTableFactory(new FieldHashPartitionComputerFactory(keyFields,
+                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                .of(UTF8StringPointable.FACTORY) }), tableSize), true);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
 
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec,
-                new FieldHashPartitionComputerFactory(
-                        keyFields,
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields,
                         new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
                                 .of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "singleKeyAvgExtGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "singleKeyAvgExtGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -494,52 +371,38 @@
     public void singleKeyMinMaxStringInmemGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        UTF8StringSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 0 };
         int tableSize = 8;
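+        // Same in-memory hash group-by shape, but the second aggregate tracks a
+        // min/max over the string at field 15 instead of a numeric average.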
 
-        HashGroupOperatorDescriptor grouper = new HashGroupOperatorDescriptor(
-                spec,
-                keyFields,
-                new FieldHashPartitionComputerFactory(
-                        keyFields,
+        HashGroupOperatorDescriptor grouper = new HashGroupOperatorDescriptor(spec, keyFields,
+                new FieldHashPartitionComputerFactory(keyFields,
                         new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
                                 .of(UTF8StringPointable.FACTORY) }),
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
-                        .of(UTF8StringPointable.FACTORY) },
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, true),
-                                new MinMaxStringFieldAggregatorFactory(15,
-                                        true, false) }), outputRec, tableSize);
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(1, true),
+                        new MinMaxStringFieldAggregatorFactory(15, true, false) }), outputRec, tableSize);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
 
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec,
-                new FieldHashPartitionComputerFactory(
-                        keyFields,
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields,
                         new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
                                 .of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "singleKeyAvgInmemGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "singleKeyMinMaxStringInmemGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -552,47 +415,34 @@
     public void singleKeyMinMaxStringPreClusterGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        UTF8StringSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 0 };
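+        // Pre-clustered variant of the min/max-string test: runs of equal keys are
+        // aggregated with an int sum (field 1) and a string min/max (field 15).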
 
-        PreclusteredGroupOperatorDescriptor grouper = new PreclusteredGroupOperatorDescriptor(
-                spec,
-                keyFields,
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
-                        .of(UTF8StringPointable.FACTORY) },
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, true),
-                                new MinMaxStringFieldAggregatorFactory(15,
-                                        true, false) }), outputRec);
+        PreclusteredGroupOperatorDescriptor grouper = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(1, true),
+                        new MinMaxStringFieldAggregatorFactory(15, true, false) }), outputRec);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
 
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec,
-                new FieldHashPartitionComputerFactory(
-                        keyFields,
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields,
                         new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
                                 .of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "singleKeyAvgInmemGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "singleKeyMinMaxStringPreClusterGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -605,63 +455,42 @@
     public void singleKeyMinMaxStringExtGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        UTF8StringSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 0 };
         int frameLimits = 4;
         int tableSize = 8;
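+        // External min/max-string group-by: the partial aggregator scans the raw
+        // string at field 15, while the merge aggregator reads the partial extreme
+        // from field 2 of the intermediate records (key at 0, int sum at 1).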
 
-        ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(
-                spec,
-                keyFields,
-                frameLimits,
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
-                        .of(UTF8StringPointable.FACTORY) },
-                new UTF8StringNormalizedKeyComputerFactory(),
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, false),
-                                new MinMaxStringFieldAggregatorFactory(15,
-                                        true, true) }),
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, false),
-                                new MinMaxStringFieldAggregatorFactory(2, true,
-                                        true) }),
-                outputRec,
-                new HashSpillableTableFactory(
-                        new FieldHashPartitionComputerFactory(
-                                keyFields,
-                                new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY) }),
-                        tableSize), true);
+        ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, keyFields, frameLimits,
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new UTF8StringNormalizedKeyComputerFactory(), new MultiFieldsAggregatorFactory(
+                        new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
+                                new MinMaxStringFieldAggregatorFactory(15, true, true) }),
+                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(1, false),
+                        new MinMaxStringFieldAggregatorFactory(2, true, true) }), outputRec,
+                new HashSpillableTableFactory(new FieldHashPartitionComputerFactory(keyFields,
+                        new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
+                                .of(UTF8StringPointable.FACTORY) }), tableSize), true);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
 
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec,
-                new FieldHashPartitionComputerFactory(
-                        keyFields,
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields,
                         new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
                                 .of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "singleKeyAvgExtGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "singleKeyMinMaxStringExtGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -674,58 +503,39 @@
     public void multiKeySumInmemGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 8, 0 };
         int tableSize = 8;
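+        // Composite-key group-by on fields 8 and 0: both the partition computer
+        // and the comparator array carry one UTF-8 string entry per key field;
+        // the aggregates are plain int sums over fields 1 and 3.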
 
-        HashGroupOperatorDescriptor grouper = new HashGroupOperatorDescriptor(
-                spec, keyFields, new FieldHashPartitionComputerFactory(
-                        keyFields,
-                        new IBinaryHashFunctionFactory[] {
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY),
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY) }),
-                new IBinaryComparatorFactory[] {
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY),
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY) },
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, true),
-                                new IntSumFieldAggregatorFactory(3, true) }),
+        HashGroupOperatorDescriptor grouper = new HashGroupOperatorDescriptor(spec, keyFields,
+                new FieldHashPartitionComputerFactory(keyFields, new IBinaryHashFunctionFactory[] {
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(1, true), new IntSumFieldAggregatorFactory(3, true) }),
                 outputRec, tableSize);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
 
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec, new FieldHashPartitionComputerFactory(keyFields,
-                        new IBinaryHashFunctionFactory[] {
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY),
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY) }));
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields, new IBinaryHashFunctionFactory[] {
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "multiKeySumInmemGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "multiKeySumInmemGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -738,51 +548,35 @@
     public void multiKeySumPreClusterGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 8, 0 };
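+        // Pre-clustered composite-key variant: expects input clustered on (8, 0)
+        // and computes the same two int sums.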
 
-        PreclusteredGroupOperatorDescriptor grouper = new PreclusteredGroupOperatorDescriptor(
-                spec, keyFields,
-                new IBinaryComparatorFactory[] {
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY),
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY) },
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, true),
-                                new IntSumFieldAggregatorFactory(3, true) }),
+        PreclusteredGroupOperatorDescriptor grouper = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(1, true), new IntSumFieldAggregatorFactory(3, true) }),
                 outputRec);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
 
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec, new FieldHashPartitionComputerFactory(keyFields,
-                        new IBinaryHashFunctionFactory[] {
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY),
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY) }));
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields, new IBinaryHashFunctionFactory[] {
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "multiKeySumInmemGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "multiKeySumPreClusterGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -795,69 +589,43 @@
     public void multiKeySumExtGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 8, 0 };
         int frameLimits = 4;
         int tableSize = 8;
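+        // External composite-key sum: the merge-side sums read fields 2 and 3,
+        // where the partial sums land after the two key columns of the
+        // intermediate records.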
 
-        ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(
-                spec,
-                keyFields,
-                frameLimits,
-                new IBinaryComparatorFactory[] {
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY),
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY) },
-                new UTF8StringNormalizedKeyComputerFactory(),
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, false),
-                                new IntSumFieldAggregatorFactory(3, false) }),
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(2, false),
-                                new IntSumFieldAggregatorFactory(3, false) }),
-                outputRec,
-                new HashSpillableTableFactory(
-                        new FieldHashPartitionComputerFactory(
-                                keyFields,
-                                new IBinaryHashFunctionFactory[] {
-                                        PointableBinaryHashFunctionFactory
-                                                .of(UTF8StringPointable.FACTORY),
-                                        PointableBinaryHashFunctionFactory
-                                                .of(UTF8StringPointable.FACTORY) }),
-                        tableSize), true);
-
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
-
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec, new FieldHashPartitionComputerFactory(keyFields,
+        ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, keyFields, frameLimits,
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new UTF8StringNormalizedKeyComputerFactory(), new MultiFieldsAggregatorFactory(
+                        new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
+                                new IntSumFieldAggregatorFactory(3, false) }), new MultiFieldsAggregatorFactory(
+                        new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(2, false),
+                                new IntSumFieldAggregatorFactory(3, false) }), outputRec,
+                new HashSpillableTableFactory(new FieldHashPartitionComputerFactory(keyFields,
                         new IBinaryHashFunctionFactory[] {
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY),
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY) }));
+                                PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                                PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }), tableSize), true);
+
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
+
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields, new IBinaryHashFunctionFactory[] {
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "multiKeySumExtGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "multiKeySumExtGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -870,60 +638,40 @@
     public void multiKeyAvgInmemGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        FloatSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                FloatSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 8, 0 };
         int tableSize = 8;
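+        // In-memory composite-key avg test: sum/count/avg over field 1, grouped
+        // on the string pair (8, 0).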
 
-        HashGroupOperatorDescriptor grouper = new HashGroupOperatorDescriptor(
-                spec, keyFields, new FieldHashPartitionComputerFactory(
-                        keyFields,
-                        new IBinaryHashFunctionFactory[] {
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY),
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY) }),
-                new IBinaryComparatorFactory[] {
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY),
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY) },
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, true),
-                                new CountFieldAggregatorFactory(true),
-                                new AvgFieldGroupAggregatorFactory(1, true) }),
-                outputRec, tableSize);
+        HashGroupOperatorDescriptor grouper = new HashGroupOperatorDescriptor(spec, keyFields,
+                new FieldHashPartitionComputerFactory(keyFields, new IBinaryHashFunctionFactory[] {
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(1, true), new CountFieldAggregatorFactory(true),
+                        new AvgFieldGroupAggregatorFactory(1, true) }), outputRec, tableSize);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
 
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec, new FieldHashPartitionComputerFactory(keyFields,
-                        new IBinaryHashFunctionFactory[] {
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY),
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY) }));
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields, new IBinaryHashFunctionFactory[] {
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "multiKeyAvgInmemGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "multiKeyAvgInmemGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -936,53 +684,36 @@
     public void multiKeyAvgPreClusterGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        FloatSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                FloatSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 8, 0 };
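+        // Pre-clustered composite-key avg test: the same sum/count/avg aggregates,
+        // applied to runs of equal (8, 0) key pairs.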
 
-        PreclusteredGroupOperatorDescriptor grouper = new PreclusteredGroupOperatorDescriptor(
-                spec, keyFields,
-                new IBinaryComparatorFactory[] {
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY),
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY) },
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, true),
-                                new CountFieldAggregatorFactory(true),
-                                new AvgFieldGroupAggregatorFactory(1, true) }),
-                outputRec);
+        PreclusteredGroupOperatorDescriptor grouper = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(1, true), new CountFieldAggregatorFactory(true),
+                        new AvgFieldGroupAggregatorFactory(1, true) }), outputRec);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
 
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec, new FieldHashPartitionComputerFactory(keyFields,
-                        new IBinaryHashFunctionFactory[] {
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY),
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY) }));
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields, new IBinaryHashFunctionFactory[] {
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "multiKeyAvgInmemGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "multiKeyAvgPreClusterGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -995,72 +726,46 @@
     public void multiKeyAvgExtGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        FloatSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                FloatSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 8, 0 };
         int frameLimits = 4;
         int tableSize = 8;
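+        // External composite-key avg: partial sum/count/avg over field 1, merged
+        // by re-summing fields 2 and 3 and recombining the avg state at field 4 of
+        // the intermediate records (the two keys occupy positions 0 and 1).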
 
-        ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(
-                spec,
-                keyFields,
-                frameLimits,
-                new IBinaryComparatorFactory[] {
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY),
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY) },
+        ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, keyFields, frameLimits,
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                 new UTF8StringNormalizedKeyComputerFactory(),
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, false),
-                                new CountFieldAggregatorFactory(false),
-                                new AvgFieldGroupAggregatorFactory(1, false) }),
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(2, false),
+                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(1, false), new CountFieldAggregatorFactory(false),
+                        new AvgFieldGroupAggregatorFactory(1, false) }), new MultiFieldsAggregatorFactory(
+                        new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(2, false),
                                 new IntSumFieldAggregatorFactory(3, false),
-                                new AvgFieldMergeAggregatorFactory(4, false) }),
-                outputRec,
-                new HashSpillableTableFactory(
-                        new FieldHashPartitionComputerFactory(
-                                keyFields,
-                                new IBinaryHashFunctionFactory[] {
-                                        PointableBinaryHashFunctionFactory
-                                                .of(UTF8StringPointable.FACTORY),
-                                        PointableBinaryHashFunctionFactory
-                                                .of(UTF8StringPointable.FACTORY) }),
-                        tableSize), true);
-
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
-
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec, new FieldHashPartitionComputerFactory(keyFields,
+                                new AvgFieldMergeAggregatorFactory(4, false) }), outputRec,
+                new HashSpillableTableFactory(new FieldHashPartitionComputerFactory(keyFields,
                         new IBinaryHashFunctionFactory[] {
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY),
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY) }));
+                                PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                                PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }), tableSize), true);
+
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
+
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields, new IBinaryHashFunctionFactory[] {
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "multiKeyAvgExtGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "multiKeyAvgExtGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -1073,58 +778,39 @@
     public void multiKeyMinMaxStringInmemGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        UTF8StringSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 8, 0 };
         int tableSize = 8;
 
-        HashGroupOperatorDescriptor grouper = new HashGroupOperatorDescriptor(
-                spec, keyFields, new FieldHashPartitionComputerFactory(
-                        keyFields,
-                        new IBinaryHashFunctionFactory[] {
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY),
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY) }),
-                new IBinaryComparatorFactory[] {
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY),
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY) },
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, true),
-                                new MinMaxStringFieldAggregatorFactory(15,
-                                        true, false) }), outputRec, tableSize);
+        HashGroupOperatorDescriptor grouper = new HashGroupOperatorDescriptor(spec, keyFields,
+                new FieldHashPartitionComputerFactory(keyFields, new IBinaryHashFunctionFactory[] {
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(1, true),
+                        new MinMaxStringFieldAggregatorFactory(15, true, false) }), outputRec, tableSize);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
 
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec, new FieldHashPartitionComputerFactory(keyFields,
-                        new IBinaryHashFunctionFactory[] {
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY),
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY) }));
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields, new IBinaryHashFunctionFactory[] {
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "multiKeyMinMaxStringInmemGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "multiKeyMinMaxStringInmemGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -1137,51 +823,35 @@
     public void multiKeyMinMaxStringPreClusterGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        UTF8StringSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 8, 0 };
 
-        PreclusteredGroupOperatorDescriptor grouper = new PreclusteredGroupOperatorDescriptor(
-                spec, keyFields,
-                new IBinaryComparatorFactory[] {
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY),
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY) },
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, true),
-                                new MinMaxStringFieldAggregatorFactory(15,
-                                        true, false) }), outputRec);
+        PreclusteredGroupOperatorDescriptor grouper = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(1, true),
+                        new MinMaxStringFieldAggregatorFactory(15, true, false) }), outputRec);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
 
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec, new FieldHashPartitionComputerFactory(keyFields,
-                        new IBinaryHashFunctionFactory[] {
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY),
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY) }));
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields, new IBinaryHashFunctionFactory[] {
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "multiKeyMinMaxStringPreClusterGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "multiKeyMinMaxStringPreClusterGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
@@ -1194,71 +864,44 @@
     public void multiKeyMinMaxStringExtGroupTest() throws Exception {
         JobSpecification spec = new JobSpecification();
 
-        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
-                spec, splitProvider, tupleParserFactory, desc);
+        FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
+                desc);
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-                csvScanner, NC2_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
 
-        RecordDescriptor outputRec = new RecordDescriptor(
-                new ISerializerDeserializer[] {
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        UTF8StringSerializerDeserializer.INSTANCE,
-                        IntegerSerializerDeserializer.INSTANCE,
-                        UTF8StringSerializerDeserializer.INSTANCE });
+        RecordDescriptor outputRec = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
 
         int[] keyFields = new int[] { 8, 0 };
         int frameLimits = 4;
         int tableSize = 8;
 
-        ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(
-                spec,
-                keyFields,
-                frameLimits,
-                new IBinaryComparatorFactory[] {
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY),
-                        PointableBinaryComparatorFactory
-                                .of(UTF8StringPointable.FACTORY) },
-                new UTF8StringNormalizedKeyComputerFactory(),
-                new MultiFieldsAggregatorFactory(
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(1, false),
-                                new MinMaxStringFieldAggregatorFactory(15,
-                                        true, true) }),
-                new MultiFieldsAggregatorFactory(new int[] { 0, 1 },
-                        new IFieldAggregateDescriptorFactory[] {
-                                new IntSumFieldAggregatorFactory(2, false),
-                                new MinMaxStringFieldAggregatorFactory(3, true,
-                                        true) }),
-                outputRec,
-                new HashSpillableTableFactory(
-                        new FieldHashPartitionComputerFactory(
-                                keyFields,
-                                new IBinaryHashFunctionFactory[] {
-                                        PointableBinaryHashFunctionFactory
-                                                .of(UTF8StringPointable.FACTORY),
-                                        PointableBinaryHashFunctionFactory
-                                                .of(UTF8StringPointable.FACTORY) }),
-                        tableSize), true);
-
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper,
-                NC2_ID, NC1_ID);
-
-        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(
-                spec, new FieldHashPartitionComputerFactory(keyFields,
+        ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, keyFields, frameLimits,
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                new UTF8StringNormalizedKeyComputerFactory(), new MultiFieldsAggregatorFactory(
+                        new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
+                                new MinMaxStringFieldAggregatorFactory(15, true, true) }),
+                new MultiFieldsAggregatorFactory(new int[] { 0, 1 }, new IFieldAggregateDescriptorFactory[] {
+                        new IntSumFieldAggregatorFactory(2, false),
+                        new MinMaxStringFieldAggregatorFactory(3, true, true) }), outputRec,
+                new HashSpillableTableFactory(new FieldHashPartitionComputerFactory(keyFields,
                         new IBinaryHashFunctionFactory[] {
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY),
-                                PointableBinaryHashFunctionFactory
-                                        .of(UTF8StringPointable.FACTORY) }));
+                                PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                                PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }), tableSize), true);
+
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
+
+        IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
+                new FieldHashPartitionComputerFactory(keyFields, new IBinaryHashFunctionFactory[] {
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
         spec.connect(conn1, csvScanner, 0, grouper, 0);
 
-        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec,
-                "multiKeyMinMaxStringExtGroupTest");
+        AbstractSingleActivityOperatorDescriptor printer = getPrinter(spec, "multiKeyMinMaxStringExtGroupTest");
 
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-                NC2_ID, NC1_ID);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
         spec.connect(conn2, grouper, 0, printer, 0);
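The dataflow each of the group-by tests above constructs, sketched as a comment using only the operator and helper names visible in these hunks:

// FileScanOperatorDescriptor csvScanner
//   | MToNPartitioningConnectorDescriptor
//   |   (FieldHashPartitionComputerFactory over keyFields, with
//   |    PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) per key)
//   v
// grouper: HashGroupOperatorDescriptor / PreclusteredGroupOperatorDescriptor /
//          ExternalGroupOperatorDescriptor, each aggregating through a
//          MultiFieldsAggregatorFactory
//   | OneToOneConnectorDescriptor
//   v
// printer = getPrinter(spec, testName)
//
// Every operator is pinned to node controllers via
// PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, op, NC2_ID, NC1_ID).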
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/CountOfCountsTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/CountOfCountsTest.java
index aea6126..5008991 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/CountOfCountsTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/CountOfCountsTest.java
@@ -25,6 +25,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -44,13 +45,14 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.group.IFieldAggregateDescriptorFactory;
 import edu.uci.ics.hyracks.dataflow.std.group.aggregators.CountFieldAggregatorFactory;
 import edu.uci.ics.hyracks.dataflow.std.group.aggregators.MultiFieldsAggregatorFactory;
 import edu.uci.ics.hyracks.dataflow.std.group.preclustered.PreclusteredGroupOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.sort.InMemorySortOperatorDescriptor;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class CountOfCountsTest extends AbstractIntegrationTest {
     @Test
@@ -76,12 +78,10 @@
 
         RecordDescriptor desc2 = new RecordDescriptor(new ISerializerDeserializer[] {
                 UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
-        PreclusteredGroupOperatorDescriptor group = new PreclusteredGroupOperatorDescriptor(
-                spec,
-                new int[] { 0 },
+        PreclusteredGroupOperatorDescriptor group = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 0 },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
-                desc2);
+                new MultiFieldsAggregatorFactory(
+                        new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc2);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group, NC1_ID);
 
         InMemorySortOperatorDescriptor sorter2 = new InMemorySortOperatorDescriptor(spec, new int[] { 1 },
@@ -91,13 +91,15 @@
         RecordDescriptor desc3 = new RecordDescriptor(new ISerializerDeserializer[] {
                 IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
         PreclusteredGroupOperatorDescriptor group2 = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 1 },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, new MultiFieldsAggregatorFactory(
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
+                new MultiFieldsAggregatorFactory(
                         new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc3);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group2, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
+        spec.addResultSetId(rsId);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
@@ -148,12 +150,10 @@
 
         RecordDescriptor desc2 = new RecordDescriptor(new ISerializerDeserializer[] {
                 UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
-        PreclusteredGroupOperatorDescriptor group = new PreclusteredGroupOperatorDescriptor(
-                spec,
-                new int[] { 0 },
+        PreclusteredGroupOperatorDescriptor group = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 0 },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
-                desc2);
+                new MultiFieldsAggregatorFactory(
+                        new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc2);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group, NC1_ID, NC2_ID, NC1_ID, NC2_ID);
 
         InMemorySortOperatorDescriptor sorter2 = new InMemorySortOperatorDescriptor(spec, new int[] { 1 },
@@ -163,13 +163,16 @@
         RecordDescriptor desc3 = new RecordDescriptor(new ISerializerDeserializer[] {
                 IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
         PreclusteredGroupOperatorDescriptor group2 = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 1 },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, new MultiFieldsAggregatorFactory(
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
+                new MultiFieldsAggregatorFactory(
                         new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc3);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group2, NC1_ID, NC2_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
+        spec.addResultSetId(rsId);
+
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
@@ -220,12 +223,10 @@
 
         RecordDescriptor desc2 = new RecordDescriptor(new ISerializerDeserializer[] {
                 UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
-        PreclusteredGroupOperatorDescriptor group = new PreclusteredGroupOperatorDescriptor(
-                spec,
-                new int[] { 0 },
+        PreclusteredGroupOperatorDescriptor group = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 0 },
                 new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
-                desc2);
+                new MultiFieldsAggregatorFactory(
+                        new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc2);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group, NC1_ID, NC2_ID, NC1_ID, NC2_ID);
 
         InMemorySortOperatorDescriptor sorter2 = new InMemorySortOperatorDescriptor(spec, new int[] { 1 },
@@ -235,13 +236,16 @@
         RecordDescriptor desc3 = new RecordDescriptor(new ISerializerDeserializer[] {
                 IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
         PreclusteredGroupOperatorDescriptor group2 = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 1 },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, new MultiFieldsAggregatorFactory(
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
+                new MultiFieldsAggregatorFactory(
                         new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc3);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group2, NC1_ID, NC2_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
+        spec.addResultSetId(rsId);
+
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/LocalityAwareConnectorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/LocalityAwareConnectorTest.java
index 0d5a627..93ed2c7 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/LocalityAwareConnectorTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/LocalityAwareConnectorTest.java
@@ -26,8 +26,8 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
@@ -52,12 +52,13 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
 import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.group.IFieldAggregateDescriptorFactory;
 import edu.uci.ics.hyracks.dataflow.std.group.aggregators.FloatSumFieldAggregatorFactory;
 import edu.uci.ics.hyracks.dataflow.std.group.aggregators.IntSumFieldAggregatorFactory;
 import edu.uci.ics.hyracks.dataflow.std.group.aggregators.MultiFieldsAggregatorFactory;
 import edu.uci.ics.hyracks.dataflow.std.group.hash.HashGroupOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class LocalityAwareConnectorTest extends AbstractMultiNCIntegrationTest {
 
@@ -200,13 +201,13 @@
         runTest(spec);
     }
 
-    private AbstractSingleActivityOperatorDescriptor getPrinter(IOperatorDescriptorRegistry spec, String prefix)
+    private AbstractSingleActivityOperatorDescriptor getPrinter(JobSpecification spec, String prefix)
             throws IOException {
 
-        AbstractSingleActivityOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec,
-                new ConstantFileSplitProvider(new FileSplit[] {
-                        new FileSplit("asterix-005", createTempFile().getAbsolutePath()),
-                        new FileSplit("asterix-006", createTempFile().getAbsolutePath()) }), "\t");
+        ResultSetId rsId = new ResultSetId(1);
+        AbstractSingleActivityOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
+        spec.addResultSetId(rsId);
 
         return printer;
     }
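The getPrinter rewrite above is the template applied throughout this commit: result sinks move from PlainFileWriterOperatorDescriptor temp files to the dataset result API. A minimal sketch of the new pattern, built only from types and calls that appear in these hunks; the boolean third argument is passed as true in some tests and false in others, and reading it as an ordered-results flag is an assumption:

import java.io.IOException;

import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;

class ResultWriterSketch {
    // Registers a result set on the job and returns the writer operator that
    // feeds it; callers still add a location constraint and connect it.
    static IOperatorDescriptor attachResultWriter(JobSpecification spec, boolean ordered)
            throws IOException {
        ResultSetId rsId = new ResultSetId(1);
        spec.addResultSetId(rsId);
        return new ResultWriterOperatorDescriptor(spec, rsId, ordered,
                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
    }
}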
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/OptimizedSortMergeTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/OptimizedSortMergeTest.java
index 1ee4400..ec9be32 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/OptimizedSortMergeTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/OptimizedSortMergeTest.java
@@ -24,6 +24,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -40,9 +41,10 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.LimitOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.sort.OptimizedExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class OptimizedSortMergeTest extends AbstractIntegrationTest {
 
@@ -75,9 +77,11 @@
                         PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, ordersDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
@@ -128,9 +132,11 @@
         LimitOperatorDescriptor filter = new LimitOperatorDescriptor(spec, ordersDesc, outputLimit);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, filter, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/ScanPrintTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/ScanPrintTest.java
index 9355110..961f780 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/ScanPrintTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/ScanPrintTest.java
@@ -24,6 +24,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
@@ -42,7 +43,8 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class ScanPrintTest extends AbstractIntegrationTest {
     @Test
@@ -63,10 +65,11 @@
                 desc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] {
-                new FileSplit(NC2_ID, createTempFile().getAbsolutePath()),
-                new FileSplit(NC1_ID, createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
+        spec.addResultSetId(rsId);
+
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         IConnectorDescriptor conn = new OneToOneConnectorDescriptor(spec);
@@ -98,9 +101,11 @@
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
+        spec.addResultSetId(rsId);
+
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
@@ -135,9 +140,11 @@
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
+        spec.addResultSetId(rsId);
+
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SortMergeTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SortMergeTest.java
index 2c3fddf..0da93f2 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SortMergeTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SortMergeTest.java
@@ -24,6 +24,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -40,9 +41,10 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.sort.InMemorySortOperatorDescriptor;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class SortMergeTest extends AbstractIntegrationTest {
     @Test
@@ -73,9 +75,11 @@
                 ordersDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
@@ -118,9 +122,11 @@
                         PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, ordersDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SplitOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SplitOperatorTest.java
index 2b32142..6040748 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SplitOperatorTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/SplitOperatorTest.java
@@ -26,6 +26,7 @@
 import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
@@ -35,8 +36,9 @@
 import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.LineFileWriteOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.SplitOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class SplitOperatorTest extends AbstractIntegrationTest {
 
@@ -50,6 +52,8 @@
             Assert.assertEquals(lineA, lineB);
         }
         Assert.assertNull(fileB.readLine());
+        fileA.close();
+        fileB.close();
     }
 
     @Test
@@ -83,8 +87,11 @@
 
         IOperatorDescriptor outputOp[] = new IOperatorDescriptor[outputFile.length];
         for (int i = 0; i < outputArity; i++) {
-            outputOp[i] = new LineFileWriteOperatorDescriptor(spec, new FileSplit[] { new FileSplit(NC1_ID,
-                    outputFile[i].getAbsolutePath()) });
+            ResultSetId rsId = new ResultSetId(i);
+            spec.addResultSetId(rsId);
+
+            outputOp[i] = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                    ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
             PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, outputOp[i], locations);
         }
 
@@ -96,10 +103,10 @@
         for (int i = 0; i < outputArity; i++) {
             spec.addRoot(outputOp[i]);
         }
-        runTest(spec);
-
+        String[] expectedResultsFileNames = new String[outputArity];
         for (int i = 0; i < outputArity; i++) {
-            compareFiles(inputFileName, outputFile[i].getAbsolutePath());
+            expectedResultsFileNames[i] = inputFileName;
         }
+        runTestAndCompareResults(spec, expectedResultsFileNames);
     }
 }
\ No newline at end of file
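Two notes on the SplitOperatorTest hunks: verification now goes through runTestAndCompareResults(spec, expectedResultsFileNames) instead of per-file comparison, and the close() calls added to compareFiles plug a reader leak in the old helper. The same fix in try-with-resources form, assuming the BufferedReader-style readers implied by the readLine() calls (a sketch, not the committed code):

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

import org.junit.Assert;

class CompareFilesSketch {
    static void compareFiles(String fileNameA, String fileNameB) throws IOException {
        // Both readers are closed even when an assertion fails mid-comparison.
        try (BufferedReader fileA = new BufferedReader(new FileReader(fileNameA));
                BufferedReader fileB = new BufferedReader(new FileReader(fileNameB))) {
            String lineA;
            while ((lineA = fileA.readLine()) != null) {
                Assert.assertEquals(lineA, fileB.readLine());
            }
            Assert.assertNull(fileB.readLine());
        }
    }
}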
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
index 61d4696..b5eb850 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
@@ -14,9 +14,7 @@
  */
 package edu.uci.ics.hyracks.tests.integration;
 
-import java.io.DataOutput;
 import java.io.File;
-import java.io.IOException;
 
 import org.junit.Test;
 
@@ -25,11 +23,10 @@
 import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
 import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -47,35 +44,15 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.join.GraceHashJoinOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.join.HybridHashJoinOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.join.InMemoryHashJoinOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.MaterializingOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.tests.util.NoopNullWriterFactory;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class TPCHCustomerOrderHashJoinTest extends AbstractIntegrationTest {
-    private static class NoopNullWriterFactory implements INullWriterFactory {
-
-        private static final long serialVersionUID = 1L;
-        public static final NoopNullWriterFactory INSTANCE = new NoopNullWriterFactory();
-
-        private NoopNullWriterFactory() {
-        }
-
-        @Override
-        public INullWriter createNullWriter() {
-            return new INullWriter() {
-                @Override
-                public void writeNull(DataOutput out) throws HyracksDataException {
-                    try {
-                        out.writeShort(0);
-                    } catch (IOException e) {
-                        throw new HyracksDataException(e);
-                    }
-                }
-            };
-        }
-    }
 
     /*
      * TPCH Customer table: CREATE TABLE CUSTOMER ( C_CUSTKEY INTEGER NOT NULL,
@@ -148,9 +125,11 @@
                 custOrderJoinDesc, 128);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
@@ -229,9 +208,11 @@
                 custOrderJoinDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
@@ -310,9 +291,11 @@
                 custOrderJoinDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
@@ -392,9 +375,11 @@
                 custOrderJoinDesc, true, nullWriterFactories, 128);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
@@ -478,9 +463,11 @@
                 custOrderJoinDesc, true, nullWriterFactories);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
@@ -564,9 +551,11 @@
                 custOrderJoinDesc, true, nullWriterFactories);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
@@ -643,9 +632,11 @@
                 custOrderJoinDesc, 128);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
@@ -732,9 +723,11 @@
                 custOrderJoinDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
@@ -821,9 +814,11 @@
                 custOrderJoinDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
@@ -906,9 +901,11 @@
                 custOrderJoinDesc, 128);
         PartitionConstraintHelper.addPartitionCountConstraint(spec, join, 2);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
@@ -997,9 +994,11 @@
                 custOrderJoinDesc, 128);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor ordPartConn = new MToNPartitioningConnectorDescriptor(spec,
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
index 1e60372..99f2d18 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
@@ -25,10 +25,12 @@
 import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -43,8 +45,10 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.join.NestedLoopJoinOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.tests.util.NoopNullWriterFactory;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class TPCHCustomerOrderNestedLoopJoinTest extends AbstractIntegrationTest {
     private static class JoinComparatorFactory implements ITuplePairComparatorFactory {
@@ -165,12 +169,15 @@
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
 
         NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
-                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 4);
+                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 4, false,
+                null);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
@@ -239,12 +246,15 @@
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
 
         NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
-                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 5);
+                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 5, false,
+                null);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
@@ -313,12 +323,97 @@
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
 
         NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
-                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 6);
+                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 6, false,
+                null);
         PartitionConstraintHelper.addPartitionCountConstraint(spec, join, 2);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
+        spec.connect(ordJoinConn, ordScanner, 0, join, 0);
+
+        IConnectorDescriptor custJoinConn = new MToNReplicatingConnectorDescriptor(spec);
+        spec.connect(custJoinConn, custScanner, 0, join, 1);
+
+        IConnectorDescriptor joinPrinterConn = new MToNReplicatingConnectorDescriptor(spec);
+        spec.connect(joinPrinterConn, join, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @Test
+    public void customerOrderCIDOuterJoinMulti() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] custSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/customer-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/customer-part2.tbl"))) };
+        IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
+        RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileSplit[] ordersSplits = new FileSplit[] {
+                new FileSplit(NC1_ID, new FileReference(new File("data/tpch0.001/orders-part1.tbl"))),
+                new FileSplit(NC2_ID, new FileReference(new File("data/tpch0.001/orders-part2.tbl"))) };
+        IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
+
+        FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
+
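+        // A no-op null writer per field lets the outer join emit a padded tuple when one
+        // side has no match (see NoopNullWriterFactory below).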
+        INullWriterFactory[] nullWriterFactories = new INullWriterFactory[ordersDesc.getFieldCount()];
+        for (int j = 0; j < nullWriterFactories.length; j++) {
+            nullWriterFactories[j] = NoopNullWriterFactory.INSTANCE;
+        }
+
+        NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
+                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 5, true,
+                nullWriterFactories);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
+
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/UnionTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/UnionTest.java
index 37b55b8..5b323e6 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/UnionTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/UnionTest.java
@@ -22,6 +22,7 @@
 import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
@@ -33,8 +34,9 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.union.UnionAllOperatorDescriptor;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class UnionTest extends AbstractIntegrationTest {
     @Test
@@ -65,10 +67,11 @@
         UnionAllOperatorDescriptor unionAll = new UnionAllOperatorDescriptor(spec, 2, desc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, unionAll, NC2_ID, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] {
-                new FileSplit(NC2_ID, createTempFile().getAbsolutePath()),
-                new FileSplit(NC1_ID, createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), csvScanner01, 0, unionAll, 0);
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/BinaryTokenizerOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/BinaryTokenizerOperatorTest.java
index 836e72e..600b54b 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/BinaryTokenizerOperatorTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/BinaryTokenizerOperatorTest.java
@@ -8,6 +8,7 @@
 import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
@@ -21,13 +22,14 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.ITokenFactory;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8WordTokenFactory;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class BinaryTokenizerOperatorTest extends AbstractIntegrationTest {
 
@@ -58,9 +60,11 @@
                 tokenizerRecDesc, tokenizerFactory, tokenFields, keyFields);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, binaryTokenizer, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), dblpTitleScanner, 0, binaryTokenizer, 0);
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/InvertedIndexOperatorsTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/InvertedIndexOperatorsTest.java
index 2206a26..b5a4df8 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/InvertedIndexOperatorsTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/InvertedIndexOperatorsTest.java
@@ -8,6 +8,7 @@
 import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
@@ -21,13 +22,14 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.ITokenFactory;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8WordTokenFactory;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class InvertedIndexOperatorsTest extends AbstractIntegrationTest {
 
@@ -58,9 +60,11 @@
                 tokenizerRecDesc, tokenizerFactory, tokenFields, projFields);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, binaryTokenizer, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), dblpTitleScanner, 0, binaryTokenizer, 0);
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/WordInvertedIndexTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/WordInvertedIndexTest.java
index d8fd48e..d1071a3 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/WordInvertedIndexTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/WordInvertedIndexTest.java
@@ -30,6 +30,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
@@ -48,8 +49,8 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
@@ -74,6 +75,7 @@
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class WordInvertedIndexTest extends AbstractIntegrationTest {
     static {
@@ -87,9 +89,12 @@
     private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
     private final static String sep = System.getProperty("file.separator");
     private final static String dateString = simpleDateFormat.format(new Date());
-    private final static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + "primaryBtree" + dateString;
-    private final static String btreeFileName = System.getProperty("java.io.tmpdir") + sep + "invIndexBtree" + dateString;
-    private final static String invListsFileName = System.getProperty("java.io.tmpdir") + sep + "invIndexLists" + dateString;
+    private final static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + "primaryBtree"
+            + dateString;
+    private final static String btreeFileName = System.getProperty("java.io.tmpdir") + sep + "invIndexBtree"
+            + dateString;
+    private final static String invListsFileName = System.getProperty("java.io.tmpdir") + sep + "invIndexLists"
+            + dateString;
 
     private IFileSplitProvider primaryFileSplitProvider = new ConstantFileSplitProvider(
             new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
@@ -155,7 +160,7 @@
         spec.addRoot(primaryCreateOp);
         runTest(spec);
     }
-    
+
     @Test
     public void testConjunctiveSearcher() throws Exception {
         IInvertedIndexSearchModifierFactory conjunctiveSearchModifierFactory = new ConjunctiveSearchModifierFactory();
@@ -180,8 +185,9 @@
     private IOperatorDescriptor createPrimaryBulkLoadOp(JobSpecification spec) {
         int[] fieldPermutation = { 0, 1 };
         TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, primaryFileSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
-                btreeDataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, indexRegistryProvider, primaryFileSplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, fieldPermutation, 0.7f, btreeDataflowHelperFactory,
+                NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
         return primaryBtreeBulkLoad;
     }
@@ -206,8 +212,9 @@
         int[] lowKeyFields = null; // - infinity
         int[] highKeyFields = null; // + infinity
         BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, indexRegistryProvider, primaryFileSplitProvider, primaryTypeTraits, primaryComparatorFactories, lowKeyFields,
-                highKeyFields, true, true, btreeDataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, indexRegistryProvider, primaryFileSplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, lowKeyFields, highKeyFields, true, true, btreeDataflowHelperFactory, false,
+                NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
         return primaryBtreeSearchOp;
     }
@@ -227,9 +234,12 @@
         JobSpecification spec = new JobSpecification();
         IOperatorDescriptor keyProviderOp = createScanKeyProviderOp(spec);
         IOperatorDescriptor primaryScanOp = createPrimaryScanOp(spec);
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, printer, 0);
         spec.addRoot(printer);
@@ -265,15 +275,14 @@
     public void createInvertedIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
         InvertedIndexCreateOperatorDescriptor invIndexCreateOp = new InvertedIndexCreateOperatorDescriptor(spec,
-                storageManager, btreeFileSplitProvider, invListsFileSplitProvider,
-                indexRegistryProvider, tokenTypeTraits, tokenComparatorFactories, invListsTypeTraits,
-                invListsComparatorFactories, tokenizerFactory, btreeDataflowHelperFactory,
-                NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, btreeFileSplitProvider, invListsFileSplitProvider, indexRegistryProvider,
+                tokenTypeTraits, tokenComparatorFactories, invListsTypeTraits, invListsComparatorFactories,
+                tokenizerFactory, btreeDataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, invIndexCreateOp, NC1_ID);
         spec.addRoot(invIndexCreateOp);
         runTest(spec);
     }
-    
+
     public void loadInvertedIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
         IOperatorDescriptor keyProviderOp = createScanKeyProviderOp(spec);
@@ -325,20 +334,23 @@
         JobSpecification spec = new JobSpecification();
         IOperatorDescriptor queryProviderOp = createQueryProviderOp(spec, queryString);
         IOperatorDescriptor invIndexSearchOp = createInvertedIndexSearchOp(spec, searchModifierFactory);
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         spec.connect(new OneToOneConnectorDescriptor(spec), queryProviderOp, 0, invIndexSearchOp, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), invIndexSearchOp, 0, printer, 0);
         spec.addRoot(printer);
         runTest(spec);
     }
-    
+
     @AfterClass
     public static void cleanup() throws Exception {
-    	File primary = new File(primaryFileName);
-    	File btree = new File(btreeFileName);
-    	File invLists = new File(invListsFileName);
+        File primary = new File(primaryFileName);
+        File btree = new File(btreeFileName);
+        File invLists = new File(invListsFileName);
         primary.deleteOnExit();
         btree.deleteOnExit();
         invLists.deleteOnExit();
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java
index 6625148..92b6e14 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java
@@ -30,6 +30,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -47,8 +48,8 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
@@ -64,6 +65,7 @@
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class RTreePrimaryIndexSearchOperatorTest extends AbstractIntegrationTest {
     static {
@@ -190,9 +192,11 @@
                 dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryRTreeSearchOp, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryRTreeSearchOp, 0);
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java
index ef2950e..2a00394 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java
@@ -29,6 +29,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -45,6 +46,7 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
 import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
@@ -64,6 +66,7 @@
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class RTreePrimaryIndexStatsOperatorTest extends AbstractIntegrationTest {
     static {
@@ -175,9 +178,11 @@
                 primaryTypeTraits, primaryComparatorFactories, dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryStatsOp, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), primaryStatsOp, 0, printer, 0);
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java
index 030afcf..1d86037 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java
@@ -30,6 +30,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -47,8 +48,8 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
@@ -67,6 +68,7 @@
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class RTreeSecondaryIndexSearchOperatorTest extends AbstractIntegrationTest {
     static {
@@ -297,9 +299,11 @@
                 secondaryTypeTraits, secondaryComparatorFactories, keyFields, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryRTreeSearchOp, NC1_ID);
 
-        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
-                createTempFile().getAbsolutePath()) });
-        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        ResultSetId rsId = new ResultSetId(1);
+        spec.addResultSetId(rsId);
+
+        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
+                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryRTreeSearchOp, 0);
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/util/NoopNullWriterFactory.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/util/NoopNullWriterFactory.java
new file mode 100644
index 0000000..d119509
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/util/NoopNullWriterFactory.java
@@ -0,0 +1,31 @@
+package edu.uci.ics.hyracks.tests.util;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
+import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
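+/**
+ * An INullWriterFactory whose writers emit a two-byte zero for each null field.
+ * The integration tests use UTF-8 string fields throughout, so the placeholder
+ * deserializes as an empty string when an outer join pads an unmatched tuple.
+ */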
+public class NoopNullWriterFactory implements INullWriterFactory {
+
+    private static final long serialVersionUID = 1L;
+    public static final NoopNullWriterFactory INSTANCE = new NoopNullWriterFactory();
+
+    private NoopNullWriterFactory() {
+    }
+
+    @Override
+    public INullWriter createNullWriter() {
+        return new INullWriter() {
+            @Override
+            public void writeNull(DataOutput out) throws HyracksDataException {
+                try {
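+                    // A zero UTF length: string deserializers read this back as "".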
+                    out.writeShort(0);
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+        };
+    }
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/util/ResultSerializerFactoryProvider.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/util/ResultSerializerFactoryProvider.java
new file mode 100644
index 0000000..19c4475
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/util/ResultSerializerFactoryProvider.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.tests.util;
+
+import java.io.DataInputStream;
+import java.io.PrintStream;
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.dataflow.value.IResultSerializer;
+import edu.uci.ics.hyracks.api.dataflow.value.IResultSerializerFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+
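+/**
+ * Test-only factory whose serializers print each tuple as comma-separated field
+ * values, one tuple per line. Typical wiring, as used throughout these tests:
+ *
+ *   ResultSetId rsId = new ResultSetId(1);
+ *   spec.addResultSetId(rsId);
+ *   IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false,
+ *           ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
+ */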
+public class ResultSerializerFactoryProvider implements Serializable {
+    private static final long serialVersionUID = 1L;
+
+    public static final ResultSerializerFactoryProvider INSTANCE = new ResultSerializerFactoryProvider();
+
+    private ResultSerializerFactoryProvider() {
+    }
+
+    public IResultSerializerFactory getResultSerializerFactoryProvider() {
+        return new IResultSerializerFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public IResultSerializer createResultSerializer(final RecordDescriptor recordDesc,
+                    final PrintStream printStream) {
+                return new IResultSerializer() {
+                    private static final long serialVersionUID = 1L;
+
+                    ByteBufferInputStream bbis = new ByteBufferInputStream();
+                    DataInputStream di = new DataInputStream(bbis);
+
+                    @Override
+                    public void init() throws HyracksDataException {
+
+                    }
+
+                    @Override
+                    public boolean appendTuple(IFrameTupleAccessor tAccess, int tIdx) throws HyracksDataException {
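+                        // A tuple's data begins after its field slot offsets within the frame.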
+                        int start = tAccess.getTupleStartOffset(tIdx) + tAccess.getFieldSlotsLength();
+
+                        bbis.setByteBuffer(tAccess.getBuffer(), start);
+
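+                        // Deserialize the fields in record order and print the row as CSV.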
+                        Object[] record = new Object[recordDesc.getFieldCount()];
+                        for (int i = 0; i < record.length; ++i) {
+                            Object instance = recordDesc.getFields()[i].deserialize(di);
+                            if (i == 0) {
+                                printStream.print(String.valueOf(instance));
+                            } else {
+                                printStream.print(", " + String.valueOf(instance));
+                            }
+                        }
+                        printStream.println();
+                        return true;
+                    }
+                };
+            }
+        };
+    }
+}
diff --git a/hyracks/hyracks-examples/pom.xml b/hyracks/hyracks-examples/pom.xml
index 8ce8108..551e2be 100644
--- a/hyracks/hyracks-examples/pom.xml
+++ b/hyracks/hyracks-examples/pom.xml
@@ -1,8 +1,6 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks</groupId>
   <artifactId>hyracks-examples</artifactId>
-  <version>0.2.3-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>hyracks-examples</name>
 
diff --git a/hyracks/hyracks-examples/text-example/pom.xml b/hyracks/hyracks-examples/text-example/pom.xml
index ba8649e..0476bb3 100644
--- a/hyracks/hyracks-examples/text-example/pom.xml
+++ b/hyracks/hyracks-examples/text-example/pom.xml
@@ -2,7 +2,6 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks.examples</groupId>
   <artifactId>text-example</artifactId>
-  <version>0.2.3-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>text-example</name>
 
diff --git a/hyracks/hyracks-examples/text-example/textapp/pom.xml b/hyracks/hyracks-examples/text-example/textapp/pom.xml
index 8ac69d8..945df0b 100644
--- a/hyracks/hyracks-examples/text-example/textapp/pom.xml
+++ b/hyracks/hyracks-examples/text-example/textapp/pom.xml
@@ -2,7 +2,6 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks.examples.text</groupId>
   <artifactId>textapp</artifactId>
-  <version>0.2.3-SNAPSHOT</version>
   <name>textapp</name>
 
   <parent>
@@ -142,8 +141,9 @@
       	<artifactId>maven-compiler-plugin</artifactId>
       	<version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
       <plugin>
diff --git a/hyracks/hyracks-examples/text-example/textclient/pom.xml b/hyracks/hyracks-examples/text-example/textclient/pom.xml
index d593ef9..4aace73 100644
--- a/hyracks/hyracks-examples/text-example/textclient/pom.xml
+++ b/hyracks/hyracks-examples/text-example/textclient/pom.xml
@@ -2,7 +2,6 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks.examples.text</groupId>
   <artifactId>textclient</artifactId>
-  <version>0.2.3-SNAPSHOT</version>
   <name>textclient</name>
 
   <parent>
@@ -33,13 +32,15 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
         <executions>
           <execution>
           <id>textclient</id>
diff --git a/hyracks/hyracks-examples/text-example/texthelper/pom.xml b/hyracks/hyracks-examples/text-example/texthelper/pom.xml
index 8e32c8c..bcb280c 100644
--- a/hyracks/hyracks-examples/text-example/texthelper/pom.xml
+++ b/hyracks/hyracks-examples/text-example/texthelper/pom.xml
@@ -36,8 +36,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-examples/tpch-example/pom.xml b/hyracks/hyracks-examples/tpch-example/pom.xml
index b237c9b..9951792 100644
--- a/hyracks/hyracks-examples/tpch-example/pom.xml
+++ b/hyracks/hyracks-examples/tpch-example/pom.xml
@@ -2,7 +2,6 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks.examples</groupId>
   <artifactId>tpch-example</artifactId>
-  <version>0.2.3-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>tpch-example</name>
 
diff --git a/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml b/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
index 3170306..4e0d9f0 100644
--- a/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
+++ b/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
@@ -29,13 +29,15 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
         <executions>
           <execution>
             <configuration>
diff --git a/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/edu/uci/ics/hyracks/examples/tpch/client/Main.java b/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/edu/uci/ics/hyracks/examples/tpch/client/Main.java
index 01ccdef..0ad0ff0 100644
--- a/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/edu/uci/ics/hyracks/examples/tpch/client/Main.java
+++ b/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/edu/uci/ics/hyracks/examples/tpch/client/Main.java
@@ -199,7 +199,7 @@
 
         if ("nestedloop".equalsIgnoreCase(algo)) {
             join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
-                    PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1), custOrderJoinDesc, memSize);
+                    PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1), custOrderJoinDesc, memSize, false, null);
 
         } else if ("gracehash".equalsIgnoreCase(algo)) {
             join = new GraceHashJoinOperatorDescriptor(
diff --git a/hyracks/hyracks-hadoop-compat/pom.xml b/hyracks/hyracks-hadoop-compat/pom.xml
index 849efe3..87aaaa7 100644
--- a/hyracks/hyracks-hadoop-compat/pom.xml
+++ b/hyracks/hyracks-hadoop-compat/pom.xml
@@ -18,13 +18,15 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
         <executions>
           <execution>
             <configuration>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/pom.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/pom.xml
new file mode 100644
index 0000000..9092655
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/pom.xml
@@ -0,0 +1,108 @@
+<?xml version="1.0"?>
+<project
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+	xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>hyracks-hdfs-0.20.2</artifactId>
+	<name>hyracks-hdfs-0.20.2</name>
+	<parent>
+		<artifactId>hyracks-hdfs</artifactId>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+					<fork>true</fork>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.7.2</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<includes>
+						<include>**/*TestSuite.java</include>
+						<include>**/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<profiles>
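+		<!-- hadoop-0.20.2 is the default profile; build with -Dhadoop=1.0.4 to target Hadoop 1.0.4 instead. -->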
+		<profile>
+			<activation>
+				<activeByDefault>true</activeByDefault>
+			</activation>
+			<id>hadoop-0.20.2</id>
+			<dependencies>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-core</artifactId>
+					<version>0.20.2</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-test</artifactId>
+					<version>0.20.2</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<activation>
+				<activeByDefault>false</activeByDefault>
+				<property>
+					<name>hadoop</name>
+					<value>1.0.4</value>
+				</property>
+			</activation>
+			<id>hadoop-1.0.4</id>
+			<dependencies>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-core</artifactId>
+					<version>1.0.4</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-minicluster</artifactId>
+					<version>1.0.4</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-test</artifactId>
+					<version>1.0.4</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+	</profiles>
+
+	<dependencies>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java
new file mode 100644
index 0000000..16ce76b
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java
@@ -0,0 +1,39 @@
+package edu.uci.ics.hyracks.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * A wrapper that creates Hadoop TaskAttemptContext and JobContext instances.
+ */
+public class ContextFactory {
+
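+    // Hadoop 0.20.2 exposes TaskAttemptContext as a concrete class and Mapper.Context as a
+    // non-static inner class, so version-specific construction is isolated in this factory.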
+    @SuppressWarnings({ "unchecked", "rawtypes" })
+    public TaskAttemptContext createContext(Configuration conf, TaskAttemptID tid) throws HyracksDataException {
+        try {
+            return new Mapper().new Context(conf, tid, null, null, null, null, null);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    public TaskAttemptContext createContext(Configuration conf, int partition) throws HyracksDataException {
+        try {
+            TaskAttemptID tid = new TaskAttemptID("", 0, true, partition, 0);
+            return new TaskAttemptContext(conf, tid);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    public JobContext createJobContext(Configuration conf) {
+        return new JobContext(conf, new JobID("0", 0));
+    }
+
+}
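A minimal usage sketch (editorial, not part of the patch): operator code can obtain version-appropriate Hadoop contexts through this factory instead of instantiating them directly. The Configuration here is assumed to be already populated.

    // Hedged sketch: build a task context for partition 0 and a job context.
    Configuration conf = new Configuration();
    ContextFactory ctxFactory = new ContextFactory();
    TaskAttemptContext taskCtx = ctxFactory.createContext(conf, 0);
    JobContext jobCtx = ctxFactory.createJobContext(conf);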
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java
new file mode 100644
index 0000000..9133d35
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.hyracks.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class MiniDFSClusterFactory {
+
+    public MiniDFSCluster getMiniDFSCluster(Configuration conf, int numberOfNC) throws HyracksDataException {
+        try {
+            return new MiniDFSCluster(conf, numberOfNC, true, null);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+}
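A hedged test-scaffolding sketch: the factory hides the version-specific MiniDFSCluster constructor; getFileSystem() and shutdown() are standard MiniDFSCluster methods in Hadoop 0.20.2.

    Configuration conf = new Configuration();
    MiniDFSCluster cluster = new MiniDFSClusterFactory().getMiniDFSCluster(conf, 2);
    try {
        FileSystem dfs = cluster.getFileSystem();  // stage test input here
        // ... run the job under test ...
    } finally {
        cluster.shutdown();
    }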
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/pom.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/pom.xml
new file mode 100644
index 0000000..8b7ecf0
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/pom.xml
@@ -0,0 +1,219 @@
+<?xml version="1.0"?>
+<project
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+	xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>hyracks-hdfs-0.23.1</artifactId>
+	<name>hyracks-hdfs-0.23.1</name>
+	<parent>
+		<artifactId>hyracks-hdfs</artifactId>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+					<fork>true</fork>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.7.2</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<includes>
+						<include>**/*TestSuite.java</include>
+						<include>**/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<profiles>
+		<profile>
+			<activation>
+				<activeByDefault>true</activeByDefault>
+				<property>
+					<name>hadoop</name>
+					<value>0.23.1</value>
+				</property>
+			</activation>
+			<id>hadoop-0.23.1</id>
+			<dependencies>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-common</artifactId>
+					<version>0.23.1</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-mapreduce-client-core</artifactId>
+					<version>0.23.1</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-hdfs</artifactId>
+					<version>0.23.1</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-minicluster</artifactId>
+					<version>0.23.1</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<id>hadoop-0.23.6</id>
+			<activation>
+				<activeByDefault>false</activeByDefault>
+				<property>
+					<name>hadoop</name>
+					<value>0.23.6</value>
+				</property>
+			</activation>
+			<dependencies>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-common</artifactId>
+					<version>0.23.6</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-mapreduce-client-core</artifactId>
+					<version>0.23.6</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-hdfs</artifactId>
+					<version>0.23.6</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-minicluster</artifactId>
+					<version>0.23.6</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<activation>
+				<activeByDefault>false</activeByDefault>
+				<property>
+					<name>hadoop</name>
+					<value>cdh-4.2</value>
+				</property>
+			</activation>
+			<id>cdh-4.2</id>
+			<dependencies>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-common</artifactId>
+					<version>2.0.0-cdh4.2.0</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-mapreduce-client-core</artifactId>
+					<version>2.0.0-cdh4.2.0</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-hdfs</artifactId>
+					<version>2.0.0-cdh4.2.0</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-minicluster</artifactId>
+					<version>2.0.0-cdh4.2.0</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<activation>
+				<activeByDefault>false</activeByDefault>
+				<property>
+					<name>hadoop</name>
+					<value>cdh-4.1</value>
+				</property>
+			</activation>
+			<id>cdh-4.1</id>
+			<dependencies>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-common</artifactId>
+					<version>2.0.0-cdh4.1.0</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-mapreduce-client-core</artifactId>
+					<version>2.0.0-cdh4.1.0</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-hdfs</artifactId>
+					<version>2.0.0-cdh4.1.0</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-minicluster</artifactId>
+					<version>2.0.0-cdh4.1.0</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+	</profiles>
+
+	<dependencies>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
+
+	<repositories>
+		<repository>
+			<id>cloudera</id>
+			<url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
+		</repository>
+	</repositories>
+</project>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java
new file mode 100644
index 0000000..ddcce64
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/ContextFactory.java
@@ -0,0 +1,40 @@
+package edu.uci.ics.hyracks.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.task.JobContextImpl;
+import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * A wrapper that generates TaskAttemptContext instances against the Hadoop 0.23.x API.
+ */
+public class ContextFactory {
+
+    public TaskAttemptContext createContext(Configuration conf, TaskAttemptID tid) throws HyracksDataException {
+        try {
+            return new TaskAttemptContextImpl(conf, tid);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    public TaskAttemptContext createContext(Configuration conf, int partition) throws HyracksDataException {
+        try {
+            TaskAttemptID tid = new TaskAttemptID("", 0, TaskType.REDUCE, partition, 0);
+            return new TaskAttemptContextImpl(conf, tid);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    public JobContext createJobContext(Configuration conf) {
+        return new JobContextImpl(conf, new JobID("0", 0));
+    }
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java
new file mode 100644
index 0000000..ded75f1
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/src/main/java/edu/uci/ics/hyracks/hdfs/MiniDFSClusterFactory.java
@@ -0,0 +1,20 @@
+package edu.uci.ics.hyracks.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class MiniDFSClusterFactory {
+
+    public MiniDFSCluster getMiniDFSCluster(Configuration conf, int numberOfNC) throws HyracksDataException {
+        try {
+            MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
+            builder.numDataNodes(numberOfNC);
+            MiniDFSCluster dfsCluster = builder.build();
+            return dfsCluster;
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
new file mode 100644
index 0000000..a28c698a
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
@@ -0,0 +1,217 @@
+<?xml version="1.0"?>
+<project
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+	xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>hyracks-hdfs-core</artifactId>
+	<name>hyracks-hdfs-core</name>
+	<parent>
+		<artifactId>hyracks-hdfs</artifactId>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+					<fork>true</fork>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.7.2</version>
+				<configuration>
+					<forkMode>pertest</forkMode>
+					<includes>
+						<include>**/*TestSuite.java</include>
+						<include>**/*Test.java</include>
+					</includes>
+				</configuration>
+			</plugin>
+			<plugin>
+				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
+				<configuration>
+					<filesets>
+						<fileset>
+							<directory>.</directory>
+							<includes>
+								<include>edu*</include>
+								<include>actual*</include>
+								<include>build*</include>
+								<include>expect*</include>
+								<include>ClusterController*</include>
+								<include>edu.uci.*</include>
+							</includes>
+						</fileset>
+					</filesets>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
+	<profiles>
+		<profile>
+			<activation>
+				<activeByDefault>true</activeByDefault>
+			</activation>
+			<id>hadoop-0.20.2</id>
+			<dependencies>
+				<dependency>
+					<groupId>edu.uci.ics.hyracks</groupId>
+					<artifactId>hyracks-hdfs-0.20.2</artifactId>
+					<version>0.2.3-SNAPSHOT</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<activation>
+				<activeByDefault>false</activeByDefault>
+				<property>
+					<name>hadoop</name>
+					<value>1.0.4</value>
+				</property>
+			</activation>
+			<id>hadoop-1.0.4</id>
+			<dependencies>
+				<dependency>
+					<groupId>edu.uci.ics.hyracks</groupId>
+					<artifactId>hyracks-hdfs-0.20.2</artifactId>
+					<version>0.2.3-SNAPSHOT</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<activation>
+				<activeByDefault>false</activeByDefault>
+				<property>
+					<name>hadoop</name>
+					<value>0.23.1</value>
+				</property>
+			</activation>
+			<id>hadoop-0.23.1</id>
+			<dependencies>
+				<dependency>
+					<groupId>edu.uci.ics.hyracks</groupId>
+					<artifactId>hyracks-hdfs-0.23.1</artifactId>
+					<version>0.2.3-SNAPSHOT</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<activation>
+				<activeByDefault>false</activeByDefault>
+				<property>
+					<name>hadoop</name>
+					<value>0.23.6</value>
+				</property>
+			</activation>
+			<id>hadoop-0.23.6</id>
+			<dependencies>
+				<dependency>
+					<groupId>edu.uci.ics.hyracks</groupId>
+					<artifactId>hyracks-hdfs-0.23.1</artifactId>
+					<version>0.2.3-SNAPSHOT</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<activation>
+				<activeByDefault>false</activeByDefault>
+				<property>
+					<name>hadoop</name>
+					<value>cdh-4.1</value>
+				</property>
+			</activation>
+			<id>cdh-4.1</id>
+			<dependencies>
+				<dependency>
+					<groupId>edu.uci.ics.hyracks</groupId>
+					<artifactId>hyracks-hdfs-0.23.1</artifactId>
+					<version>0.2.3-SNAPSHOT</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<activation>
+				<activeByDefault>false</activeByDefault>
+				<property>
+					<name>hadoop</name>
+					<value>cdh-4.2</value>
+				</property>
+			</activation>
+			<id>cdh-4.2</id>
+			<dependencies>
+				<dependency>
+					<groupId>edu.uci.ics.hyracks</groupId>
+					<artifactId>hyracks-hdfs-0.23.1</artifactId>
+					<version>0.2.3-SNAPSHOT</version>
+					<type>jar</type>
+					<scope>compile</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+	</profiles>
+
+	<dependencies>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>3.8.1</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-api</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-dataflow-std</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-dataflow-common</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-cc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-nc</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.commons</groupId>
+			<artifactId>commons-io</artifactId>
+			<version>1.3.2</version>
+			<scope>test</scope>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/IKeyValueParser.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/IKeyValueParser.java
new file mode 100644
index 0000000..5d35ec5
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/IKeyValueParser.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.api;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Users need to implement this interface to use the HDFSReadOperatorDescriptor.
+ * 
+ * @param <K>
+ *            the key type
+ * @param <V>
+ *            the value type
+ */
+public interface IKeyValueParser<K, V> {
+
+    /**
+     * Initialize the key value parser.
+     * 
+     * @param writer
+     *            The hyracks writer for outputting data.
+     * @throws HyracksDataException
+     */
+    public void open(IFrameWriter writer) throws HyracksDataException;
+
+    /**
+     * Parse a key-value pair returned by the HDFS record reader into a tuple.
+     * When the parser's internal buffer is full, it may flush the buffer to the writer.
+     * 
+     * @param key
+     *            The key returned from Hadoop's InputReader.
+     * @param value
+     *            The value returned from Hadoop's InputReader.
+     * @param writer
+     *            The hyracks writer for outputting data.
+     * @throws HyracksDataException
+     */
+    public void parse(K key, V value, IFrameWriter writer) throws HyracksDataException;
+
+    /**
+     * Flush the residual tuples in the internal buffer to the writer.
+     * This method is called in the close() of HDFSReadOperatorDescriptor.
+     * 
+     * @param writer
+     *            The hyracks writer for outputting data.
+     * @throws HyracksDataException
+     */
+    public void close(IFrameWriter writer) throws HyracksDataException;
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/IKeyValueParserFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/IKeyValueParserFactory.java
new file mode 100644
index 0000000..7d6f868
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/IKeyValueParserFactory.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.api;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Users need to implement this interface to use the HDFSReadOperatorDescriptor.
+ * 
+ * @param <K>
+ *            the key type
+ * @param <V>
+ *            the value type
+ */
+public interface IKeyValueParserFactory<K, V> extends Serializable {
+
+    /**
+     * This method creates a key-value parser.
+     * 
+     * @param ctx
+     *            the IHyracksTaskContext
+     * @return a key-value parser instance.
+     */
+    public IKeyValueParser<K, V> createKeyValueParser(IHyracksTaskContext ctx) throws HyracksDataException;
+
+}
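A skeletal sketch of the parser lifecycle, assuming LongWritable/Text records; the class name EchoParserFactory is hypothetical, and a complete implementation (TextKeyValueParserFactory) appears later in this patch.

    public class EchoParserFactory implements IKeyValueParserFactory<LongWritable, Text> {
        private static final long serialVersionUID = 1L;

        @Override
        public IKeyValueParser<LongWritable, Text> createKeyValueParser(IHyracksTaskContext ctx) {
            return new IKeyValueParser<LongWritable, Text>() {
                @Override
                public void open(IFrameWriter writer) {
                    // allocate the output frame and tuple builder here
                }

                @Override
                public void parse(LongWritable key, Text value, IFrameWriter writer) {
                    // build a tuple from (key, value); flush the frame to the
                    // writer whenever it fills up
                }

                @Override
                public void close(IFrameWriter writer) {
                    // flush any residual tuples before the operator closes
                }
            };
        }
    }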
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/INcCollection.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/INcCollection.java
new file mode 100644
index 0000000..c51c1dd
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/INcCollection.java
@@ -0,0 +1,11 @@
+package edu.uci.ics.hyracks.hdfs.api;
+
+import org.apache.hadoop.mapred.InputSplit;
+
+@SuppressWarnings("deprecation")
+public interface INcCollection {
+
+    public String findNearestAvailableSlot(InputSplit split);
+
+    public int numAvailableSlots();
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/INcCollectionBuilder.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/INcCollectionBuilder.java
new file mode 100644
index 0000000..ef3ff23
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/INcCollectionBuilder.java
@@ -0,0 +1,18 @@
+package edu.uci.ics.hyracks.hdfs.api;
+
+import java.util.List;
+import java.util.Map;
+
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+
+/**
+ * A builder that constructs an INcCollection from node controller (NC) information.
+ * 
+ * @author yingyib
+ */
+public interface INcCollectionBuilder {
+
+    public INcCollection build(Map<String, NodeControllerInfo> ncNameToNcInfos,
+            Map<String, List<String>> ipToNcMapping, Map<String, Integer> ncNameToIndex, String[] NCs, int[] workloads,
+            int slotLimit);
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/ITupleWriter.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/ITupleWriter.java
new file mode 100644
index 0000000..8e85627
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/ITupleWriter.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.api;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+/**
+ * Users need to implement this interface to use the HDFSWriteOperatorDescriptor.
+ */
+public interface ITupleWriter {
+
+    /**
+     * Initialize the tuple writer.
+     * 
+     * @param output
+     *            The channel for output data.
+     * @throws HyracksDataException
+     */
+    public void open(DataOutput output) throws HyracksDataException;
+
+    /**
+     * Write the tuple to the DataOutput.
+     * 
+     * @param output
+     *            the DataOutput channel
+     * @param tuple
+     *            the tuple to write
+     * @throws HyracksDataException
+     */
+    public void write(DataOutput output, ITupleReference tuple) throws HyracksDataException;
+
+    /**
+     * Close the writer.
+     * 
+     * @param output
+     *            The channel for output data.
+     * @throws HyracksDataException
+     */
+    public void close(DataOutput output) throws HyracksDataException;
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/ITupleWriterFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/ITupleWriterFactory.java
new file mode 100644
index 0000000..9a025c2
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/api/ITupleWriterFactory.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.api;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Users need to implement this interface to use the HDFSWriteOperatorDescriptor.
+ */
+public interface ITupleWriterFactory extends Serializable {
+
+    /**
+     * @param ctx
+     *            the IHyracksTaskContext
+     * @return a tuple writer instance
+     */
+    public ITupleWriter getTupleWriter(IHyracksTaskContext ctx) throws HyracksDataException;
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/ConfFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/ConfFactory.java
new file mode 100644
index 0000000..4fa0164
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/ConfFactory.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.hyracks.hdfs.dataflow;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.Serializable;
+
+import org.apache.hadoop.mapred.JobConf;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+@SuppressWarnings("deprecation")
+public class ConfFactory implements Serializable {
+    private static final long serialVersionUID = 1L;
+    private byte[] confBytes;
+
+    public ConfFactory(JobConf conf) throws HyracksDataException {
+        try {
+            ByteArrayOutputStream bos = new ByteArrayOutputStream();
+            DataOutputStream dos = new DataOutputStream(bos);
+            conf.write(dos);
+            confBytes = bos.toByteArray();
+            dos.close();
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    public JobConf getConf() throws HyracksDataException {
+        try {
+            JobConf conf = new JobConf();
+            DataInputStream dis = new DataInputStream(new ByteArrayInputStream(confBytes));
+            conf.readFields(dis);
+            dis.close();
+            return conf;
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+}
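A hedged round-trip sketch: JobConf is Writable but not Serializable, so ConfFactory captures it as bytes at job-construction time and rebuilds it inside each task. The property key below is illustrative.

    JobConf conf = new JobConf();
    conf.set("mapred.input.dir", "/tmp/input");       // illustrative setting
    ConfFactory confFactory = new ConfFactory(conf);  // safe to ship in a job spec
    JobConf rebuilt = confFactory.getConf();          // called on the remote side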
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/HDFSReadOperatorDescriptor.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/HDFSReadOperatorDescriptor.java
new file mode 100644
index 0000000..f49688b
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/HDFSReadOperatorDescriptor.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.dataflow;
+
+import java.util.Arrays;
+
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import edu.uci.ics.hyracks.hdfs.api.IKeyValueParser;
+import edu.uci.ics.hyracks.hdfs.api.IKeyValueParserFactory;
+
+/**
+ * The HDFS file read operator using the Hadoop old API.
+ * To use this operator, a user needs to provide an IKeyValueParserFactory implementation that converts
+ * key-value pairs into tuples.
+ */
+@SuppressWarnings({ "deprecation", "rawtypes" })
+public class HDFSReadOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    private final ConfFactory confFactory;
+    private final InputSplitsFactory splitsFactory;
+    private final String[] scheduledLocations;
+    private final IKeyValueParserFactory tupleParserFactory;
+    private final boolean[] executed;
+
+    /**
+     * The constructor of HDFSReadOperatorDescriptor.
+     * 
+     * @param spec
+     *            the JobSpecification object
+     * @param rd
+     *            the output record descriptor
+     * @param conf
+     *            the Hadoop JobConf object, which contains the input format and the input paths
+     * @param splits
+     *            the array of FileSplits (HDFS chunks).
+     * @param scheduledLocations
+     *            the node controller names to scan the FileSplits, a one-to-one mapping. The String array
+     *            is obtained from edu.uci.ics.hyracks.hdfs.scheduler.Scheduler.getLocationConstraints(InputSplit[]).
+     * @param tupleParserFactory
+     *            the ITupleParserFactory implementation instance.
+     * @throws HyracksException
+     */
+    public HDFSReadOperatorDescriptor(JobSpecification spec, RecordDescriptor rd, JobConf conf, InputSplit[] splits,
+            String[] scheduledLocations, IKeyValueParserFactory tupleParserFactory) throws HyracksException {
+        super(spec, 0, 1);
+        try {
+            this.splitsFactory = new InputSplitsFactory(splits);
+            this.confFactory = new ConfFactory(conf);
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+        this.scheduledLocations = scheduledLocations;
+        this.executed = new boolean[scheduledLocations.length];
+        Arrays.fill(executed, false);
+        this.tupleParserFactory = tupleParserFactory;
+        this.recordDescriptors[0] = rd;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
+            throws HyracksDataException {
+        final InputSplit[] inputSplits = splitsFactory.getSplits();
+
+        return new AbstractUnaryOutputSourceOperatorNodePushable() {
+            private String nodeName = ctx.getJobletContext().getApplicationContext().getNodeId();
+
+            @SuppressWarnings("unchecked")
+            @Override
+            public void initialize() throws HyracksDataException {
+                ClassLoader ctxCL = Thread.currentThread().getContextClassLoader();
+                try {
+                    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
+                    JobConf conf = confFactory.getConf();
+                    IKeyValueParser parser = tupleParserFactory.createKeyValueParser(ctx);
+                    writer.open();
+                    parser.open(writer);
+                    InputFormat inputFormat = conf.getInputFormat();
+                    for (int i = 0; i < inputSplits.length; i++) {
+                        /**
+                         * read all the partitions scheduled to the current node
+                         */
+                        if (scheduledLocations[i].equals(nodeName)) {
+                            /**
+                             * pick an unread split to read
+                             * synchronize among simultaneous partitions in the same machine
+                             */
+                            synchronized (executed) {
+                                if (!executed[i]) {
+                                    executed[i] = true;
+                                } else {
+                                    continue;
+                                }
+                            }
+
+                            /**
+                             * read the split
+                             */
+                            RecordReader reader = inputFormat.getRecordReader(inputSplits[i], conf, Reporter.NULL);
+                            Object key = reader.createKey();
+                            Object value = reader.createValue();
+                            while (reader.next(key, value)) {
+                                parser.parse(key, value, writer);
+                            }
+                        }
+                    }
+                    parser.close(writer);
+                    writer.close();
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                } finally {
+                    Thread.currentThread().setContextClassLoader(ctxCL);
+                }
+            }
+        };
+    }
+}
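A hedged wiring sketch: how a client might construct the read operator. The Scheduler constructor arguments are an assumption based on the javadoc above; the input path, split count, and record descriptor are illustrative.

    JobConf conf = new JobConf();
    conf.setInputFormat(TextInputFormat.class);
    FileInputFormat.setInputPaths(conf, new Path("/tmp/input"));   // illustrative path
    InputSplit[] splits = conf.getInputFormat().getSplits(conf, 4);

    Scheduler scheduler = new Scheduler("localhost", 1099);        // assumed CC host/port
    String[] locations = scheduler.getLocationConstraints(splits);

    RecordDescriptor recordDesc = new RecordDescriptor(
            new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
    JobSpecification spec = new JobSpecification();
    HDFSReadOperatorDescriptor reader = new HDFSReadOperatorDescriptor(spec,
            recordDesc, conf, splits, locations, new TextKeyValueParserFactory());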
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/HDFSWriteOperatorDescriptor.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/HDFSWriteOperatorDescriptor.java
new file mode 100644
index 0000000..3ce6b2a
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/HDFSWriteOperatorDescriptor.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.dataflow;
+
+import java.io.File;
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobConf;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.hyracks.hdfs.api.ITupleWriter;
+import edu.uci.ics.hyracks.hdfs.api.ITupleWriterFactory;
+
+/**
+ * The HDFS file write operator using the Hadoop old API.
+ * To use this operator, a user needs to provide an ITupleWriterFactory implementation.
+ */
+@SuppressWarnings("deprecation")
+public class HDFSWriteOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    private ConfFactory confFactory;
+    private ITupleWriterFactory tupleWriterFactory;
+
+    /**
+     * The constructor of HDFSWriteOperatorDescriptor.
+     * 
+     * @param spec
+     *            the JobSpecification object
+     * @param conf
+     *            the Hadoop JobConf which contains the output path
+     * @param tupleWriterFactory
+     *            the ITupleWriterFactory implementation object
+     * @throws HyracksException
+     */
+    public HDFSWriteOperatorDescriptor(JobSpecification spec, JobConf conf, ITupleWriterFactory tupleWriterFactory)
+            throws HyracksException {
+        super(spec, 1, 0);
+        this.confFactory = new ConfFactory(conf);
+        this.tupleWriterFactory = tupleWriterFactory;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            final IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
+            throws HyracksDataException {
+
+        return new AbstractUnaryInputSinkOperatorNodePushable() {
+
+            private FSDataOutputStream dos;
+            private RecordDescriptor inputRd = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
+            private FrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), inputRd);
+            private FrameTupleReference tuple = new FrameTupleReference();
+            private ITupleWriter tupleWriter;
+            private ClassLoader ctxCL;
+
+            @Override
+            public void open() throws HyracksDataException {
+                ctxCL = Thread.currentThread().getContextClassLoader();
+                Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
+                JobConf conf = confFactory.getConf();
+                String outputDirPath = FileOutputFormat.getOutputPath(conf).toString();
+                String fileName = outputDirPath + File.separator + "part-" + partition;
+
+                tupleWriter = tupleWriterFactory.getTupleWriter(ctx);
+                try {
+                    FileSystem dfs = FileSystem.get(conf);
+                    dos = dfs.create(new Path(fileName), true);
+                    tupleWriter.open(dos);
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            @Override
+            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+                accessor.reset(buffer);
+                int tupleCount = accessor.getTupleCount();
+                for (int i = 0; i < tupleCount; i++) {
+                    tuple.reset(accessor, i);
+                    tupleWriter.write(dos, tuple);
+                }
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+                try {
+                    tupleWriter.close(dos);
+                    dos.close();
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                } finally {
+                    Thread.currentThread().setContextClassLoader(ctxCL);
+                }
+            }
+
+        };
+    }
+}
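Continuing the hedged sketch from the read operator: the write operator consumes the reader's output and writes one part-&lt;partition&gt; file per task under the configured output path. Partition-constraint setup via PartitionConstraintHelper is omitted here.

    FileOutputFormat.setOutputPath(conf, new Path("/tmp/output"));  // illustrative path
    HDFSWriteOperatorDescriptor writer =
            new HDFSWriteOperatorDescriptor(spec, conf, new TextTupleWriterFactory());
    spec.connect(new OneToOneConnectorDescriptor(spec), reader, 0, writer, 0);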
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/InputSplitsFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/InputSplitsFactory.java
new file mode 100644
index 0000000..147e872
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/dataflow/InputSplitsFactory.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.dataflow;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.Serializable;
+import java.lang.reflect.Constructor;
+
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.InputSplit;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+@SuppressWarnings({ "deprecation", "rawtypes" })
+public class InputSplitsFactory implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+    private byte[] splitBytes;
+    private String splitClassName;
+
+    public InputSplitsFactory(InputSplit[] splits) throws HyracksDataException {
+        splitBytes = splitsToBytes(splits);
+        if (splits.length > 0) {
+            splitClassName = splits[0].getClass().getName();
+        } else {
+            splitClassName = FileSplit.class.getName();
+        }
+    }
+
+    public InputSplit[] getSplits() throws HyracksDataException {
+        return bytesToSplits(splitBytes);
+    }
+
+    /**
+     * Convert splits to bytes.
+     * 
+     * @param splits
+     *            input splits
+     * @return bytes which serialize the splits
+     * @throws HyracksDataException
+     */
+    private byte[] splitsToBytes(InputSplit[] splits) throws HyracksDataException {
+        try {
+            ByteArrayOutputStream bos = new ByteArrayOutputStream();
+            DataOutputStream dos = new DataOutputStream(bos);
+            dos.writeInt(splits.length);
+            for (int i = 0; i < splits.length; i++) {
+                splits[i].write(dos);
+            }
+            dos.close();
+            return bos.toByteArray();
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    /**
+     * Convert bytes back to splits.
+     * 
+     * @param bytes
+     *            the serialized input splits
+     * @return the deserialized input splits
+     * @throws HyracksDataException
+     */
+    private InputSplit[] bytesToSplits(byte[] bytes) throws HyracksDataException {
+        try {
+            Class splitClass = Class.forName(splitClassName);
+            Constructor[] constructors = splitClass.getDeclaredConstructors();
+            Constructor defaultConstructor = null;
+            for (Constructor constructor : constructors) {
+                if (constructor.getParameterTypes().length == 0) {
+                    constructor.setAccessible(true);
+                    defaultConstructor = constructor;
+                }
+            }
+            ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
+            DataInputStream dis = new DataInputStream(bis);
+            int size = dis.readInt();
+            InputSplit[] splits = new InputSplit[size];
+            for (int i = 0; i < size; i++) {
+                splits[i] = (InputSplit) defaultConstructor.newInstance();
+                splits[i].readFields(dis);
+            }
+            dis.close();
+            return splits;
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+}
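A hedged sketch of the round trip: like ConfFactory, this works around InputSplit being Writable but not Serializable by recording the concrete split class name and rebuilding instances reflectively on the remote side.

    InputSplit[] splits = conf.getInputFormat().getSplits(conf, 4);
    InputSplitsFactory splitsFactory = new InputSplitsFactory(splits); // serializable
    InputSplit[] rebuilt = splitsFactory.getSplits();                  // on a remote node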
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/RawBinaryComparatorFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/RawBinaryComparatorFactory.java
new file mode 100644
index 0000000..90c5977
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/RawBinaryComparatorFactory.java
@@ -0,0 +1,32 @@
+package edu.uci.ics.hyracks.hdfs.lib;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class RawBinaryComparatorFactory implements IBinaryComparatorFactory {
+
+    private static final long serialVersionUID = 1L;
+    public static IBinaryComparatorFactory INSTANCE = new RawBinaryComparatorFactory();
+
+    private RawBinaryComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                int commonLength = Math.min(l1, l2);
+                for (int i = 0; i < commonLength; i++) {
+                    if (b1[s1 + i] != b2[s2 + i]) {
+                        return b1[s1 + i] - b2[s2 + i];
+                    }
+                }
+                int difference = l1 - l2;
+                return difference == 0 ? 0 : (difference > 0 ? 1 : -1);
+            }
+
+        };
+    }
+}
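A worked example of the ordering this comparator defines: bytes are compared left to right as signed values, and when one array is a prefix of the other the shorter one sorts first.

    IBinaryComparator cmp = RawBinaryComparatorFactory.INSTANCE.createBinaryComparator();
    byte[] a = { 1, 2 };
    byte[] b = { 1, 3 };
    int r1 = cmp.compare(a, 0, a.length, b, 0, b.length); // negative: {1,2} < {1,3}
    int r2 = cmp.compare(a, 0, 1, a, 0, 2);               // negative: prefix sorts first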
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/RawBinaryHashFunctionFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/RawBinaryHashFunctionFactory.java
new file mode 100644
index 0000000..7895fec
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/RawBinaryHashFunctionFactory.java
@@ -0,0 +1,29 @@
+package edu.uci.ics.hyracks.hdfs.lib;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+
+public class RawBinaryHashFunctionFactory implements IBinaryHashFunctionFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static IBinaryHashFunctionFactory INSTANCE = new RawBinaryHashFunctionFactory();
+
+    private RawBinaryHashFunctionFactory() {
+    }
+
+    @Override
+    public IBinaryHashFunction createBinaryHashFunction() {
+
+        return new IBinaryHashFunction() {
+            @Override
+            public int hash(byte[] bytes, int offset, int length) {
+                int value = 1;
+                int end = offset + length;
+                for (int i = offset; i < end; i++)
+                    value = value * 31 + (int) bytes[i];
+                return value;
+            }
+        };
+    }
+
+}
\ No newline at end of file
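A worked example: this is the usual 31-based rolling hash over signed byte values, seeded with 1, so hashing {1, 2} yields (1 * 31 + 1) * 31 + 2 = 994.

    IBinaryHashFunction h = RawBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction();
    int code = h.hash(new byte[] { 1, 2 }, 0, 2); // 994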
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/TextKeyValueParserFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/TextKeyValueParserFactory.java
new file mode 100644
index 0000000..9574bb4
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/TextKeyValueParserFactory.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.lib;
+
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.hdfs.api.IKeyValueParser;
+import edu.uci.ics.hyracks.hdfs.api.IKeyValueParserFactory;
+
+public class TextKeyValueParserFactory implements IKeyValueParserFactory<LongWritable, Text> {
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public IKeyValueParser<LongWritable, Text> createKeyValueParser(final IHyracksTaskContext ctx) {
+
+        final ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
+        final ByteBuffer buffer = ctx.allocateFrame();
+        final FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+        appender.reset(buffer, true);
+
+        return new IKeyValueParser<LongWritable, Text>() {
+
+            @Override
+            public void open(IFrameWriter writer) {
+
+            }
+
+            @Override
+            public void parse(LongWritable key, Text value, IFrameWriter writer) throws HyracksDataException {
+                tb.reset();
+                tb.addField(value.getBytes(), 0, value.getLength());
+                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                    FrameUtils.flushFrame(buffer, writer);
+                    appender.reset(buffer, true);
+                    if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                        throw new HyracksDataException("tuple cannot be appended into the frame");
+                    }
+                }
+            }
+
+            @Override
+            public void close(IFrameWriter writer) throws HyracksDataException {
+                FrameUtils.flushFrame(buffer, writer);
+            }
+
+        };
+    }
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/TextTupleWriterFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/TextTupleWriterFactory.java
new file mode 100644
index 0000000..0da14e5
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/lib/TextTupleWriterFactory.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.lib;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.hdfs.api.ITupleWriter;
+import edu.uci.ics.hyracks.hdfs.api.ITupleWriterFactory;
+
+public class TextTupleWriterFactory implements ITupleWriterFactory {
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public ITupleWriter getTupleWriter(IHyracksTaskContext ctx) {
+        return new ITupleWriter() {
+            private byte newLine = "\n".getBytes()[0];
+
+            @Override
+            public void open(DataOutput output) {
+
+            }
+
+            @Override
+            public void write(DataOutput output, ITupleReference tuple) throws HyracksDataException {
+                byte[] data = tuple.getFieldData(0);
+                int start = tuple.getFieldStart(0);
+                int len = tuple.getFieldLength(0);
+                try {
+                    output.write(data, start, len);
+                    output.writeByte(newLine);
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            @Override
+            public void close(DataOutput output) {
+
+            }
+
+        };
+    }
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/scheduler/IPProximityNcCollectionBuilder.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/scheduler/IPProximityNcCollectionBuilder.java
new file mode 100644
index 0000000..320b48b
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/scheduler/IPProximityNcCollectionBuilder.java
@@ -0,0 +1,121 @@
+package edu.uci.ics.hyracks.hdfs.scheduler;
+
+import java.net.InetAddress;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.TreeMap;
+
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.mapred.InputSplit;
+
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.hdfs.api.INcCollection;
+import edu.uci.ics.hyracks.hdfs.api.INcCollectionBuilder;
+
+@SuppressWarnings("deprecation")
+public class IPProximityNcCollectionBuilder implements INcCollectionBuilder {
+
+    @Override
+    public INcCollection build(Map<String, NodeControllerInfo> ncNameToNcInfos,
+            final Map<String, List<String>> ipToNcMapping, final Map<String, Integer> ncNameToIndex, String[] NCs,
+            final int[] workloads, final int slotLimit) {
+        final TreeMap<BytesWritable, IntWritable> availableIpsToSlots = new TreeMap<BytesWritable, IntWritable>();
+        for (int i = 0; i < workloads.length; i++) {
+            if (workloads[i] < slotLimit) {
+                BytesWritable ip = new BytesWritable(ncNameToNcInfos.get(NCs[i]).getNetworkAddress().getIpAddress());
+                IntWritable availableSlot = availableIpsToSlots.get(ip);
+                if (availableSlot == null) {
+                    availableSlot = new IntWritable(slotLimit - workloads[i]);
+                    availableIpsToSlots.put(ip, availableSlot);
+                } else {
+                    availableSlot.set(slotLimit - workloads[i] + availableSlot.get());
+                }
+            }
+        }
+        return new INcCollection() {
+
+            @Override
+            public String findNearestAvailableSlot(InputSplit split) {
+                try {
+                    String[] locs = split.getLocations();
+                    int minDistance = Integer.MAX_VALUE;
+                    BytesWritable currentCandidateIp = null;
+                    if (locs != null && locs.length > 0) {
+                        for (int j = 0; j < locs.length; j++) {
+                            /**
+                             * get all the IP addresses from the name
+                             */
+                            InetAddress[] allIps = InetAddress.getAllByName(locs[j]);
+                            for (InetAddress ip : allIps) {
+                                BytesWritable splitIp = new BytesWritable(ip.getAddress());
+                                /**
+                                 * if the node controller exists
+                                 */
+                                BytesWritable candidateNcIp = availableIpsToSlots.floorKey(splitIp);
+                                if (candidateNcIp == null) {
+                                    candidateNcIp = availableIpsToSlots.ceilingKey(splitIp);
+                                }
+                                if (candidateNcIp != null) {
+                                    if (availableIpsToSlots.get(candidateNcIp).get() > 0) {
+                                        byte[] candidateIP = candidateNcIp.getBytes();
+                                        byte[] splitIP = splitIp.getBytes();
+                                        int candidateInt = candidateIP[0] << 24 | (candidateIP[1] & 0xFF) << 16
+                                                | (candidateIP[2] & 0xFF) << 8 | (candidateIP[3] & 0xFF);
+                                        int splitInt = splitIP[0] << 24 | (splitIP[1] & 0xFF) << 16
+                                                | (splitIP[2] & 0xFF) << 8 | (splitIP[3] & 0xFF);
+                                        int distance = Math.abs(candidateInt - splitInt);
+                                        if (minDistance > distance) {
+                                            minDistance = distance;
+                                            currentCandidateIp = candidateNcIp;
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                    } else {
+                        for (Entry<BytesWritable, IntWritable> entry : availableIpsToSlots.entrySet()) {
+                            if (entry.getValue().get() > 0) {
+                                currentCandidateIp = entry.getKey();
+                                break;
+                            }
+                        }
+                    }
+
+                    if (currentCandidateIp != null) {
+                        /**
+                         * Update the entry of the selected IP
+                         */
+                        IntWritable availableSlot = availableIpsToSlots.get(currentCandidateIp);
+                        availableSlot.set(availableSlot.get() - 1);
+                        if (availableSlot.get() == 0) {
+                            availableIpsToSlots.remove(currentCandidateIp);
+                        }
+                        /**
+                         * Update the entry of the selected NC
+                         */
+                        List<String> dataLocations = ipToNcMapping.get(InetAddress.getByAddress(
+                                currentCandidateIp.getBytes()).getHostAddress());
+                        for (String nc : dataLocations) {
+                            int ncIndex = ncNameToIndex.get(nc);
+                            if (workloads[ncIndex] < slotLimit) {
+                                return nc;
+                            }
+                        }
+                    }
+                    /** not scheduled */
+                    return null;
+                } catch (Exception e) {
+                    throw new IllegalStateException(e);
+                }
+            }
+
+            @Override
+            public int numAvailableSlots() {
+                return availableIpsToSlots.size();
+            }
+
+        };
+    }
+}
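The proximity metric above folds each IPv4 address into one int (most significant octet first) and compares absolute differences, so addresses sharing leading octets end up numerically close. A worked illustration with a hypothetical helper (not part of the patch):

    static int packIpv4(byte[] ip) {
        // same big-endian fold as in findNearestAvailableSlot above
        return ip[0] << 24 | (ip[1] & 0xFF) << 16 | (ip[2] & 0xFF) << 8 | (ip[3] & 0xFF);
    }

    // packIpv4({10,0,1,5}) - packIpv4({10,0,1,1}) == 4    (same /24 subnet)
    // packIpv4({10,0,2,1}) - packIpv4({10,0,1,1}) == 256  (neighboring /24s)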
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
new file mode 100644
index 0000000..fa5e7ae
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
@@ -0,0 +1,211 @@
+package edu.uci.ics.hyracks.hdfs.scheduler;
+
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.TreeMap;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.mapred.InputSplit;
+
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.topology.ClusterTopology;
+import edu.uci.ics.hyracks.hdfs.api.INcCollection;
+import edu.uci.ics.hyracks.hdfs.api.INcCollectionBuilder;
+
+@SuppressWarnings("deprecation")
+public class RackAwareNcCollectionBuilder implements INcCollectionBuilder {
+    private static final Logger LOGGER = Logger.getLogger(RackAwareNcCollectionBuilder.class.getName());
+    private ClusterTopology topology;
+
+    public RackAwareNcCollectionBuilder(ClusterTopology topology) {
+        this.topology = topology;
+    }
+
+    @Override
+    public INcCollection build(Map<String, NodeControllerInfo> ncNameToNcInfos,
+            final Map<String, List<String>> ipToNcMapping, final Map<String, Integer> ncNameToIndex, String[] NCs,
+            final int[] workloads, final int slotLimit) {
+        try {
+            final Map<List<Integer>, List<String>> pathToNCs = new HashMap<List<Integer>, List<String>>();
+            for (int i = 0; i < NCs.length; i++) {
+                List<Integer> path = new ArrayList<Integer>();
+                String ipAddress = InetAddress.getByAddress(
+                        ncNameToNcInfos.get(NCs[i]).getNetworkAddress().getIpAddress()).getHostAddress();
+                topology.lookupNetworkTerminal(ipAddress, path);
+                if (path.size() <= 0) {
+                    // if the hyracks nc is not in the defined cluster
+                    path.add(Integer.MIN_VALUE);
+                    LOGGER.info(NCs[i] + "'s IP address is not in the cluster topology file!");
+                }
+                List<String> ncs = pathToNCs.get(path);
+                if (ncs == null) {
+                    ncs = new ArrayList<String>();
+                    pathToNCs.put(path, ncs);
+                }
+                ncs.add(NCs[i]);
+            }
+
+            final TreeMap<List<Integer>, IntWritable> availableIpsToSlots = new TreeMap<List<Integer>, IntWritable>(
+                    new Comparator<List<Integer>>() {
+
+                        @Override
+                        public int compare(List<Integer> l1, List<Integer> l2) {
+                            int commonLength = Math.min(l1.size(), l2.size());
+                            for (int i = 0; i < commonLength; i++) {
+                                Integer value1 = l1.get(i);
+                                Integer value2 = l2.get(i);
+                                int cmp = value1 > value2 ? 1 : (value1 < value2 ? -1 : 0);
+                                if (cmp != 0) {
+                                    return cmp;
+                                }
+                            }
+                            return l1.size() > l2.size() ? 1 : (l1.size() < l2.size() ? -1 : 0);
+                        }
+
+                    });
+            for (int i = 0; i < workloads.length; i++) {
+                if (workloads[i] < slotLimit) {
+                    List<Integer> path = new ArrayList<Integer>();
+                    String ipAddress = InetAddress.getByAddress(
+                            ncNameToNcInfos.get(NCs[i]).getNetworkAddress().getIpAddress()).getHostAddress();
+                    topology.lookupNetworkTerminal(ipAddress, path);
+                    if (path.size() <= 0) {
+                        // if the hyracks nc is not in the defined cluster
+                        path.add(Integer.MIN_VALUE);
+                    }
+                    IntWritable availableSlot = availableIpsToSlots.get(path);
+                    if (availableSlot == null) {
+                        availableSlot = new IntWritable(slotLimit - workloads[i]);
+                        availableIpsToSlots.put(path, availableSlot);
+                    } else {
+                        availableSlot.set(slotLimit - workloads[i] + availableSlot.get());
+                    }
+                }
+            }
+            return new INcCollection() {
+
+                @Override
+                public String findNearestAvailableSlot(InputSplit split) {
+                    try {
+                        String[] locs = split.getLocations();
+                        int minDistance = Integer.MAX_VALUE;
+                        List<Integer> currentCandidatePath = null;
+                        if (locs != null && locs.length > 0) {
+                            for (int j = 0; j < locs.length; j++) {
+                                /**
+                                 * get all the IP addresses from the name
+                                 */
+                                InetAddress[] allIps = InetAddress.getAllByName(locs[j]);
+                                boolean inTopology = false;
+                                for (InetAddress ip : allIps) {
+                                    List<Integer> splitPath = new ArrayList<Integer>();
+                                    boolean inCluster = topology.lookupNetworkTerminal(ip.getHostAddress(), splitPath);
+                                    if (!inCluster) {
+                                        continue;
+                                    }
+                                    inTopology = true;
+                                    /**
+                                     * if the node controller exists
+                                     */
+                                    List<Integer> candidatePath = availableIpsToSlots.floorKey(splitPath);
+                                    if (candidatePath == null) {
+                                        candidatePath = availableIpsToSlots.ceilingKey(splitPath);
+                                    }
+                                    if (candidatePath != null) {
+                                        if (availableIpsToSlots.get(candidatePath).get() > 0) {
+                                            int distance = distance(splitPath, candidatePath);
+                                            if (minDistance > distance) {
+                                                minDistance = distance;
+                                                currentCandidatePath = candidatePath;
+                                            }
+                                        }
+
+                                    }
+                                }
+
+                                if (!inTopology) {
+                                    LOGGER.info(locs[j] + "'s IP address is not in the cluster topology file!");
+                                    /**
+                                     * if the machine is not in the topology file
+                                     */
+                                    List<Integer> candidatePath = null;
+                                    for (Entry<List<Integer>, IntWritable> entry : availableIpsToSlots.entrySet()) {
+                                        if (entry.getValue().get() > 0) {
+                                            candidatePath = entry.getKey();
+                                            break;
+                                        }
+                                    }
+                                    /** fall back to the first candidate with available slots */
+                                    if (candidatePath != null) {
+                                        if (availableIpsToSlots.get(candidatePath).get() > 0) {
+                                            currentCandidatePath = candidatePath;
+                                        }
+                                    }
+                                }
+                            }
+                        } else {
+                            for (Entry<List<Integer>, IntWritable> entry : availableIpsToSlots.entrySet()) {
+                                if (entry.getValue().get() > 0) {
+                                    currentCandidatePath = entry.getKey();
+                                    break;
+                                }
+                            }
+                        }
+
+                        if (currentCandidatePath != null && currentCandidatePath.size() > 0) {
+                            /**
+                             * Update the entry of the selected IP
+                             */
+                            IntWritable availableSlot = availableIpsToSlots.get(currentCandidatePath);
+                            availableSlot.set(availableSlot.get() - 1);
+                            if (availableSlot.get() == 0) {
+                                availableIpsToSlots.remove(currentCandidatePath);
+                            }
+                            /**
+                             * Update the entry of the selected NC
+                             */
+                            List<String> candidateNcs = pathToNCs.get(currentCandidatePath);
+                            for (String candidate : candidateNcs) {
+                                int ncIndex = ncNameToIndex.get(candidate);
+                                if (workloads[ncIndex] < slotLimit) {
+                                    return candidate;
+                                }
+                            }
+                        }
+                        /** not scheduled */
+                        return null;
+                    } catch (Exception e) {
+                        throw new IllegalStateException(e);
+                    }
+                }
+
+                @Override
+                public int numAvailableSlots() {
+                    return availableIpsToSlots.size();
+                }
+
+                private int distance(List<Integer> splitPath, List<Integer> candidatePath) {
+                    int commonLength = Math.min(splitPath.size(), candidatePath.size());
+                    int distance = 0;
+                    for (int i = 0; i < commonLength; i++) {
+                        distance = distance * 100 + Math.abs(splitPath.get(i) - candidatePath.get(i));
+                    }
+                    List<Integer> restElements = splitPath.size() > candidatePath.size() ? splitPath : candidatePath;
+                    for (int i = commonLength; i < restElements.size(); i++) {
+                        distance = distance * 100 + Math.abs(restElements.get(i));
+                    }
+                    return distance;
+                }
+            };
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+}
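The distance function above weights each topology level by a factor of 100, so divergence near the root dominates divergence further down, provided no single level differs by 100 or more. Two worked cases with assumed paths:

    // splitPath = [1, 2, 3] vs. candidatePath = [1, 2, 7]:
    //   ((|1-1| * 100) + |2-2|) * 100 + |3-7| = 4    -> same rack, different node
    // splitPath = [1, 2, 3] vs. candidatePath = [1, 5, 3]:
    //   ((|1-1| * 100) + |2-5|) * 100 + |3-3| = 300  -> different rack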
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/scheduler/Scheduler.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/scheduler/Scheduler.java
new file mode 100644
index 0000000..3f7997b
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs/scheduler/Scheduler.java
@@ -0,0 +1,404 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.scheduler;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+import java.util.Random;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.mapred.InputSplit;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.topology.ClusterTopology;
+import edu.uci.ics.hyracks.hdfs.api.INcCollection;
+import edu.uci.ics.hyracks.hdfs.api.INcCollectionBuilder;
+
+/**
+ * The scheduler conducts data-local scheduling for data reading on HDFS. This
+ * class works with the Hadoop old API.
+ */
+@SuppressWarnings("deprecation")
+public class Scheduler {
+    private static final Logger LOGGER = Logger.getLogger(Scheduler.class.getName());
+
+    /** a list of NCs */
+    private String[] NCs;
+
+    /** a map from ip to NCs */
+    private Map<String, List<String>> ipToNcMapping = new HashMap<String, List<String>>();
+
+    /** a map from the NC name to the index */
+    private Map<String, Integer> ncNameToIndex = new HashMap<String, Integer>();
+
+    /** a map from NC name to the NodeControllerInfo */
+    private Map<String, NodeControllerInfo> ncNameToNcInfos;
+
+    /**
+     * the nc collection builder
+     */
+    private INcCollectionBuilder ncCollectionBuilder;
+
+    /**
+     * The constructor of the scheduler.
+     * 
+     * @param ipAddress
+     *            the IP address of the Hyracks cluster controller
+     * @param port
+     *            the port of the Hyracks cluster controller
+     * @throws HyracksException
+     */
+    public Scheduler(String ipAddress, int port) throws HyracksException {
+        try {
+            IHyracksClientConnection hcc = new HyracksConnection(ipAddress, port);
+            this.ncNameToNcInfos = hcc.getNodeControllerInfos();
+            ClusterTopology topology = hcc.getClusterTopology();
+            this.ncCollectionBuilder = topology == null ? new IPProximityNcCollectionBuilder()
+                    : new RackAwareNcCollectionBuilder(topology);
+            loadIPAddressToNCMap(ncNameToNcInfos);
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+    }
+
+    /**
+     * The constructor of the scheduler.
+     * 
+     * @param ipAddress
+     *            the IP address of the Hyracks cluster controller
+     * @param port
+     *            the port of the Hyracks cluster controller
+     * @param ncCollectionBuilder
+     *            the NC collection builder to use
+     * @throws HyracksException
+     */
+    public Scheduler(String ipAddress, int port, INcCollectionBuilder ncCollectionBuilder) throws HyracksException {
+        try {
+            IHyracksClientConnection hcc = new HyracksConnection(ipAddress, port);
+            this.ncNameToNcInfos = hcc.getNodeControllerInfos();
+            this.ncCollectionBuilder = ncCollectionBuilder;
+            loadIPAddressToNCMap(ncNameToNcInfos);
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+    }
+
+    /**
+     * The constructor of the scheduler.
+     * 
+     * @param ncNameToNcInfos
+     *            the mapping from nc names to nc infos
+     * @throws HyracksException
+     */
+    public Scheduler(Map<String, NodeControllerInfo> ncNameToNcInfos) throws HyracksException {
+        this.ncNameToNcInfos = ncNameToNcInfos;
+        this.ncCollectionBuilder = new IPProximityNcCollectionBuilder();
+        loadIPAddressToNCMap(ncNameToNcInfos);
+    }
+
+    /**
+     * The constructor of the scheduler.
+     * 
+     * @param ncNameToNcInfos
+     *            the mapping from nc names to nc infos
+     * @param topology
+     *            the Hyracks cluster topology
+     * @throws HyracksException
+     */
+    public Scheduler(Map<String, NodeControllerInfo> ncNameToNcInfos, ClusterTopology topology) throws HyracksException {
+        this(ncNameToNcInfos);
+        this.ncCollectionBuilder = topology == null ? new IPProximityNcCollectionBuilder()
+                : new RackAwareNcCollectionBuilder(topology);
+    }
+
+    /**
+     * The constructor of the scheduler.
+     * 
+     * @param ncNameToNcInfos
+     *            the mapping from nc names to nc infos
+     * @param ncCollectionBuilder
+     *            the NC collection builder to use
+     * @throws HyracksException
+     */
+    public Scheduler(Map<String, NodeControllerInfo> ncNameToNcInfos, INcCollectionBuilder ncCollectionBuilder)
+            throws HyracksException {
+        this.ncNameToNcInfos = ncNameToNcInfos;
+        this.ncCollectionBuilder = ncCollectionBuilder;
+        loadIPAddressToNCMap(ncNameToNcInfos);
+    }
+
+    /**
+     * Set location constraints for a file scan operator with a list of file
+     * splits. It guarantees that the maximum number of slots a machine gets is
+     * at most one more than the minimum number of slots a machine gets.
+     * 
+     * @throws HyracksDataException
+     */
+    public String[] getLocationConstraints(InputSplit[] splits) throws HyracksException {
+        if (splits == null) {
+            /** handle the case when the splits array is null */
+            return new String[] {};
+        }
+        int[] workloads = new int[NCs.length];
+        Arrays.fill(workloads, 0);
+        String[] locations = new String[splits.length];
+        Map<String, IntWritable> locationToNumOfSplits = new HashMap<String, IntWritable>();
+        /**
+         * upper bound number of slots that a machine can get
+         */
+        int upperBoundSlots = splits.length % workloads.length == 0 ? (splits.length / workloads.length)
+                : (splits.length / workloads.length + 1);
+        /**
+         * lower bound number of slots that a machine can get
+         */
+        int lowerBoundSlots = splits.length % workloads.length == 0 ? upperBoundSlots : upperBoundSlots - 1;
+
+        try {
+            Random random = new Random(System.currentTimeMillis());
+            boolean scheduled[] = new boolean[splits.length];
+            Arrays.fill(scheduled, false);
+            /**
+             * scan the splits and build the popularity map
+             * give the machines with fewer local splits more scheduling priority
+             */
+            buildPopularityMap(splits, locationToNumOfSplits);
+            /**
+             * push data-local lower-bounds slots to each machine
+             */
+            scheduleLocalSlots(splits, workloads, locations, lowerBoundSlots, random, scheduled, locationToNumOfSplits);
+            /**
+             * push data-local upper-bounds slots to each machine
+             */
+            scheduleLocalSlots(splits, workloads, locations, upperBoundSlots, random, scheduled, locationToNumOfSplits);
+
+            int dataLocalCount = 0;
+            for (int i = 0; i < scheduled.length; i++) {
+                if (scheduled[i]) {
+                    dataLocalCount++;
+                }
+            }
+            LOGGER.info("Data local rate: "
+                    + (scheduled.length == 0 ? 0.0 : ((float) dataLocalCount / (float) (scheduled.length))));
+            /**
+             * push non-data-local lower-bounds slots to each machine
+             */
+            scheduleNonLocalSlots(splits, workloads, locations, lowerBoundSlots, scheduled);
+            /**
+             * push non-data-local upper-bounds slots to each machine
+             */
+            scheduleNonLocalSlots(splits, workloads, locations, upperBoundSlots, scheduled);
+            return locations;
+        } catch (IOException e) {
+            throw new HyracksException(e);
+        }
+    }
+
+    /**
+     * Schedule non-local slots to each machine
+     * 
+     * @param splits
+     *            The HDFS file splits.
+     * @param workloads
+     *            The current capacity of each machine.
+     * @param locations
+     *            The result schedule.
+     * @param slotLimit
+     *            The maximum slots of each machine.
+     * @param scheduled
+     *            Indicates which splits have already been scheduled.
+     */
+    private void scheduleNonLocalSlots(InputSplit[] splits, int[] workloads, String[] locations, int slotLimit,
+            boolean[] scheduled) throws IOException, UnknownHostException {
+        /**
+         * build the map from available ips to the number of available slots
+         */
+        INcCollection ncCollection = this.ncCollectionBuilder.build(ncNameToNcInfos, ipToNcMapping, ncNameToIndex, NCs,
+                workloads, slotLimit);
+        if (ncCollection.numAvailableSlots() == 0) {
+            return;
+        }
+        /**
+         * schedule non-local file reads
+         */
+        for (int i = 0; i < splits.length; i++) {
+            /** if there is no data-local NC choice, choose a random one */
+            if (!scheduled[i]) {
+                InputSplit split = splits[i];
+                String selectedNcName = ncCollection.findNearestAvailableSlot(split);
+                if (selectedNcName != null) {
+                    int ncIndex = ncNameToIndex.get(selectedNcName);
+                    workloads[ncIndex]++;
+                    scheduled[i] = true;
+                    locations[i] = selectedNcName;
+                }
+            }
+        }
+    }
+
+    /**
+     * Schedule data-local slots to each machine.
+     * 
+     * @param splits
+     *            The HDFS file splits.
+     * @param workloads
+     *            The current capacity of each machine.
+     * @param locations
+     *            The result schedule.
+     * @param slots
+     *            The maximum slots of each machine.
+     * @param random
+     *            The random generator.
+     * @param scheduled
+     *            Indicates which splits have already been scheduled.
+     * @throws IOException
+     * @throws UnknownHostException
+     */
+    private void scheduleLocalSlots(InputSplit[] splits, int[] workloads, String[] locations, int slots, Random random,
+            boolean[] scheduled, final Map<String, IntWritable> locationToNumSplits) throws IOException,
+            UnknownHostException {
+        /** scheduling candidates are polled in increasing order of popularity (least popular first) */
+        PriorityQueue<String> scheduleCandidates = new PriorityQueue<String>(3, new Comparator<String>() {
+
+            @Override
+            public int compare(String s1, String s2) {
+                return locationToNumSplits.get(s1).compareTo(locationToNumSplits.get(s2));
+            }
+
+        });
+        for (int i = 0; i < splits.length; i++) {
+            if (scheduled[i]) {
+                continue;
+            }
+            /**
+             * get the location of all the splits
+             */
+            String[] locs = splits[i].getLocations();
+            if (locs.length > 0) {
+                scheduleCandidates.clear();
+                for (int j = 0; j < locs.length; j++) {
+                    scheduleCandidates.add(locs[j]);
+                }
+
+                /** poll the queue so candidates come out in comparator order; PriorityQueue's iterator guarantees no order */
+                while (!scheduleCandidates.isEmpty()) {
+                    String candidate = scheduleCandidates.poll();
+                    /**
+                     * get all the IP addresses from the name
+                     */
+                    InetAddress[] allIps = InetAddress.getAllByName(candidate);
+                    /**
+                     * iterate over all IPs
+                     */
+                    for (InetAddress ip : allIps) {
+                        /**
+                         * if the node controller exists
+                         */
+                        if (ipToNcMapping.get(ip.getHostAddress()) != null) {
+                            /**
+                             * set the ncs
+                             */
+                            List<String> dataLocations = ipToNcMapping.get(ip.getHostAddress());
+                            int arrayPos = random.nextInt(dataLocations.size());
+                            String nc = dataLocations.get(arrayPos);
+                            int pos = ncNameToIndex.get(nc);
+                            /**
+                             * check if the node is already full
+                             */
+                            if (workloads[pos] < slots) {
+                                locations[i] = nc;
+                                workloads[pos]++;
+                                scheduled[i] = true;
+                                break;
+                            }
+                        }
+                    }
+                    /**
+                     * break out of the candidate loop if a schedule has
+                     * already been found
+                     */
+                    if (scheduled[i]) {
+                        break;
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * Scan the splits once and build a popularity map
+     * 
+     * @param splits
+     *            the split array
+     * @param locationToNumOfSplits
+     *            the map to be built
+     * @throws IOException
+     */
+    private void buildPopularityMap(InputSplit[] splits, Map<String, IntWritable> locationToNumOfSplits)
+            throws IOException {
+        for (InputSplit split : splits) {
+            String[] locations = split.getLocations();
+            for (String loc : locations) {
+                IntWritable locCount = locationToNumOfSplits.get(loc);
+                if (locCount == null) {
+                    locCount = new IntWritable(0);
+                    locationToNumOfSplits.put(loc, locCount);
+                }
+                locCount.set(locCount.get() + 1);
+            }
+        }
+    }
+
+    /**
+     * Load the IP-address-to-NC map from the NCNameToNCInfoMap
+     * 
+     * @param ncNameToNcInfos
+     * @throws HyracksException
+     */
+    private void loadIPAddressToNCMap(Map<String, NodeControllerInfo> ncNameToNcInfos) throws HyracksException {
+        try {
+            NCs = new String[ncNameToNcInfos.size()];
+            ipToNcMapping.clear();
+            ncNameToIndex.clear();
+            int i = 0;
+
+            /**
+             * build the IP address to NC map
+             */
+            for (Map.Entry<String, NodeControllerInfo> entry : ncNameToNcInfos.entrySet()) {
+                String ipAddr = InetAddress.getByAddress(entry.getValue().getNetworkAddress().getIpAddress())
+                        .getHostAddress();
+                List<String> matchedNCs = ipToNcMapping.get(ipAddr);
+                if (matchedNCs == null) {
+                    matchedNCs = new ArrayList<String>();
+                    ipToNcMapping.put(ipAddr, matchedNCs);
+                }
+                matchedNCs.add(entry.getKey());
+                NCs[i] = entry.getKey();
+                i++;
+            }
+
+            /**
+             * set up the NC name to index mapping
+             */
+            for (i = 0; i < NCs.length; i++) {
+                ncNameToIndex.put(NCs[i], i);
+            }
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+    }
+}
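Concretely, 10 splits over 4 NCs gives upperBoundSlots = 10/4 + 1 = 3 and lowerBoundSlots = 2, so every machine receives 2 or 3 splits. A minimal end-to-end sketch of the old-API scheduler (the HDFS path, CC address, and the pre-built jobSpec/readOperator are assumptions):

    JobConf conf = new JobConf();
    FileInputFormat.setInputPaths(conf, new Path("hdfs://localhost:9000/input"));
    InputSplit[] splits = conf.getInputFormat().getSplits(conf, 4);

    Scheduler scheduler = new Scheduler("127.0.0.1", 1098); // cluster controller host/port
    String[] locations = scheduler.getLocationConstraints(splits);
    // locations[i] names the NC that should read splits[i]:
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, readOperator, locations);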
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/ConfFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/ConfFactory.java
new file mode 100644
index 0000000..d843d27
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/ConfFactory.java
@@ -0,0 +1,40 @@
+package edu.uci.ics.hyracks.hdfs2.dataflow;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.Serializable;
+
+import org.apache.hadoop.mapreduce.Job;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class ConfFactory implements Serializable {
+    private static final long serialVersionUID = 1L;
+    private byte[] confBytes;
+
+    public ConfFactory(Job conf) throws HyracksDataException {
+        try {
+            ByteArrayOutputStream bos = new ByteArrayOutputStream();
+            DataOutputStream dos = new DataOutputStream(bos);
+            conf.getConfiguration().write(dos);
+            confBytes = bos.toByteArray();
+            dos.close();
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    public Job getConf() throws HyracksDataException {
+        try {
+            Job conf = new Job();
+            DataInputStream dis = new DataInputStream(new ByteArrayInputStream(confBytes));
+            conf.getConfiguration().readFields(dis);
+            dis.close();
+            return conf;
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+}
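Hadoop's Job and Configuration are not java.io.Serializable, while Hyracks operator descriptors must be; ConfFactory bridges the gap by snapshotting the configuration as Writable bytes. A round-trip sketch ("my.custom.key" is a hypothetical key):

    Job job = new Job();
    job.getConfiguration().set("my.custom.key", "value");
    ConfFactory factory = new ConfFactory(job); // bytes travel with the job spec
    Job rebuilt = factory.getConf();            // reconstructed on the node controller
    // rebuilt.getConfiguration().get("my.custom.key") returns "value"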
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/FileSplitsFactory.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/FileSplitsFactory.java
new file mode 100644
index 0000000..14dc70c
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/FileSplitsFactory.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs2.dataflow;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.Serializable;
+import java.lang.reflect.Constructor;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+@SuppressWarnings("rawtypes")
+public class FileSplitsFactory implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+    private byte[] splitBytes;
+    private String splitClassName;
+
+    public FileSplitsFactory(List<FileSplit> splits) throws HyracksDataException {
+        splitBytes = splitsToBytes(splits);
+        if (splits.size() > 0) {
+            splitClassName = splits.get(0).getClass().getName();
+        }
+    }
+
+    public List<FileSplit> getSplits() throws HyracksDataException {
+        return bytesToSplits(splitBytes);
+    }
+
+    /**
+     * Convert splits to bytes.
+     * 
+     * @param splits
+     *            input splits
+     * @return bytes which serialize the splits
+     * @throws IOException
+     */
+    private byte[] splitsToBytes(List<FileSplit> splits) throws HyracksDataException {
+        try {
+            ByteArrayOutputStream bos = new ByteArrayOutputStream();
+            DataOutputStream dos = new DataOutputStream(bos);
+            dos.writeInt(splits.size());
+            int size = splits.size();
+            for (int i = 0; i < size; i++) {
+                splits.get(i).write(dos);
+            }
+            dos.close();
+            return bos.toByteArray();
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    /**
+     * Convert bytes to splits.
+     * 
+     * @param bytes
+     *            the serialized bytes of the splits
+     * @return the reconstructed list of splits
+     * @throws HyracksDataException
+     */
+    private List<FileSplit> bytesToSplits(byte[] bytes) throws HyracksDataException {
+        try {
+            if (splitClassName == null) {
+                /** an empty split list was serialized; nothing to reconstruct */
+                return new ArrayList<FileSplit>();
+            }
+            Class splitClass = Class.forName(splitClassName);
+            Constructor[] constructors = splitClass.getDeclaredConstructors();
+            Constructor defaultConstructor = null;
+            for (Constructor constructor : constructors) {
+                if (constructor.getParameterTypes().length == 0) {
+                    constructor.setAccessible(true);
+                    defaultConstructor = constructor;
+                }
+            }
+            ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
+            DataInputStream dis = new DataInputStream(bis);
+            int size = dis.readInt();
+            List<FileSplit> splits = new ArrayList<FileSplit>();
+            for (int i = 0; i < size; i++) {
+                splits.add((FileSplit) defaultConstructor.newInstance());
+                splits.get(i).readFields(dis);
+            }
+            dis.close();
+            return splits;
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+}
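FileSplitsFactory applies the same bytes-based trick as ConfFactory, with one wrinkle: splits are rebuilt through the concrete class's (possibly non-public) no-argument constructor, hence the recorded class name and the setAccessible(true) call. A usage sketch with one assumed split:

    List<FileSplit> splits = new ArrayList<FileSplit>();
    splits.add(new FileSplit(new Path("hdfs://localhost:9000/input/a.txt"), 0, 64 * 1024 * 1024,
            new String[] { "host1" }));
    FileSplitsFactory factory = new FileSplitsFactory(splits);
    List<FileSplit> rebuilt = factory.getSplits(); // equal path, offset, and length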
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java
new file mode 100644
index 0000000..9e9abdf
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java
@@ -0,0 +1,160 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs2.dataflow;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import edu.uci.ics.hyracks.hdfs.ContextFactory;
+import edu.uci.ics.hyracks.hdfs.api.IKeyValueParser;
+import edu.uci.ics.hyracks.hdfs.api.IKeyValueParserFactory;
+
+/**
+ * The HDFS file read operator using the Hadoop new API. To use this operator, a
+ * user needs to provide an IKeyValueParserFactory implementation that converts
+ * key-value pairs into tuples.
+ */
+@SuppressWarnings("rawtypes")
+public class HDFSReadOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    private final ConfFactory confFactory;
+    private final FileSplitsFactory splitsFactory;
+    private final String[] scheduledLocations;
+    private final IKeyValueParserFactory tupleParserFactory;
+    private final boolean[] executed;
+
+    /**
+     * The constructor of HDFSReadOperatorDescriptor.
+     * 
+     * @param spec
+     *            the JobSpecification object
+     * @param rd
+     *            the output record descriptor
+     * @param conf
+     *            the Hadoop JobConf object, which contains the input format and
+     *            the input paths
+     * @param splits
+     *            the array of FileSplits (HDFS chunks).
+     * @param scheduledLocations
+     *            the node controller names to scan the FileSplits, which is a
+     *            one-to-one mapping. The String array is obtained from
+     *            edu.uci.ics.hyracks.hdfs.scheduler.Scheduler
+     *            .getLocationConstraints(InputSplit[]).
+     * @param tupleParserFactory
+     *            the IKeyValueParserFactory implementation instance.
+     * @throws HyracksException
+     */
+    public HDFSReadOperatorDescriptor(JobSpecification spec, RecordDescriptor rd, Job conf, List<InputSplit> splits,
+            String[] scheduledLocations, IKeyValueParserFactory tupleParserFactory) throws HyracksException {
+        super(spec, 0, 1);
+        try {
+            List<FileSplit> fileSplits = new ArrayList<FileSplit>();
+            for (int i = 0; i < splits.size(); i++) {
+                fileSplits.add((FileSplit) splits.get(i));
+            }
+            this.splitsFactory = new FileSplitsFactory(fileSplits);
+            this.confFactory = new ConfFactory(conf);
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+        this.scheduledLocations = scheduledLocations;
+        this.executed = new boolean[scheduledLocations.length];
+        Arrays.fill(executed, false);
+        this.tupleParserFactory = tupleParserFactory;
+        this.recordDescriptors[0] = rd;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
+            throws HyracksDataException {
+        final List<FileSplit> inputSplits = splitsFactory.getSplits();
+
+        return new AbstractUnaryOutputSourceOperatorNodePushable() {
+            private String nodeName = ctx.getJobletContext().getApplicationContext().getNodeId();
+            private ContextFactory ctxFactory = new ContextFactory();
+
+            @SuppressWarnings("unchecked")
+            @Override
+            public void initialize() throws HyracksDataException {
+                ClassLoader ctxCL = Thread.currentThread().getContextClassLoader();
+                try {
+                    Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
+                    Job job = confFactory.getConf();
+                    IKeyValueParser parser = tupleParserFactory.createKeyValueParser(ctx);
+                    writer.open();
+                    InputFormat inputFormat = ReflectionUtils.newInstance(job.getInputFormatClass(),
+                            job.getConfiguration());
+                    int size = inputSplits.size();
+                    for (int i = 0; i < size; i++) {
+                        /**
+                         * read all the partitions scheduled to the current node
+                         */
+                        if (scheduledLocations[i].equals(nodeName)) {
+                            /**
+                             * pick an unread split to read; synchronize among
+                             * simultaneous partitions on the same machine
+                             */
+                            synchronized (executed) {
+                                if (!executed[i]) {
+                                    executed[i] = true;
+                                } else {
+                                    continue;
+                                }
+                            }
+
+                            /**
+                             * read the split
+                             */
+                            TaskAttemptContext context = ctxFactory.createContext(job.getConfiguration(), i);
+                            RecordReader reader = inputFormat.createRecordReader(inputSplits.get(i), context);
+                            reader.initialize(inputSplits.get(i), context);
+                            while (reader.nextKeyValue()) {
+                                parser.parse(reader.getCurrentKey(), reader.getCurrentValue(), writer);
+                            }
+                        }
+                    }
+                    parser.close(writer);
+                    writer.close();
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                } finally {
+                    Thread.currentThread().setContextClassLoader(ctxCL);
+                }
+            }
+        };
+    }
+}
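A wiring sketch for the read side (paths, the CC address, and the surrounding jobSpec/recordDesc are assumptions; TextKeyValueParserFactory is the parser shipped in this patch's lib package, and the Scheduler here is the new-API one defined below):

    Job conf = new Job();
    conf.setInputFormatClass(TextInputFormat.class); // org.apache.hadoop.mapreduce.lib.input
    FileInputFormat.setInputPaths(conf, new Path("hdfs://localhost:9000/input"));
    List<InputSplit> splits = new TextInputFormat().getSplits(conf);

    Scheduler scheduler = new Scheduler("127.0.0.1", 1098);
    String[] locations = scheduler.getLocationConstraints(splits);
    HDFSReadOperatorDescriptor reader = new HDFSReadOperatorDescriptor(jobSpec, recordDesc, conf,
            splits, locations, new TextKeyValueParserFactory());
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, reader, locations);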
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSWriteOperatorDescriptor.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSWriteOperatorDescriptor.java
new file mode 100644
index 0000000..c1c227c
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSWriteOperatorDescriptor.java
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs2.dataflow;
+
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.hyracks.hdfs.api.ITupleWriter;
+import edu.uci.ics.hyracks.hdfs.api.ITupleWriterFactory;
+
+/**
+ * The HDFS file write operator using the Hadoop new API. To use this operator,
+ * a user needs to provide an ITupleWriterFactory.
+ */
+public class HDFSWriteOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    private ConfFactory confFactory;
+    private ITupleWriterFactory tupleWriterFactory;
+
+    /**
+     * The constructor of HDFSWriteOperatorDescriptor.
+     * 
+     * @param spec
+     *            the JobSpecification object
+     * @param conf
+     *            the Hadoop JobConf which contains the output path
+     * @param tupleWriterFactory
+     *            the ITupleWriterFactory implementation object
+     * @throws HyracksException
+     */
+    public HDFSWriteOperatorDescriptor(JobSpecification spec, Job conf, ITupleWriterFactory tupleWriterFactory)
+            throws HyracksException {
+        super(spec, 1, 0);
+        this.confFactory = new ConfFactory(conf);
+        this.tupleWriterFactory = tupleWriterFactory;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            final IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
+            throws HyracksDataException {
+
+        return new AbstractUnaryInputSinkOperatorNodePushable() {
+
+            private FSDataOutputStream dos;
+            private RecordDescriptor inputRd = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
+            private FrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), inputRd);
+            private FrameTupleReference tuple = new FrameTupleReference();
+            private ITupleWriter tupleWriter;
+            private ClassLoader ctxCL;
+
+            @Override
+            public void open() throws HyracksDataException {
+                ctxCL = Thread.currentThread().getContextClassLoader();
+                Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
+                Job conf = confFactory.getConf();
+                String outputPath = FileOutputFormat.getOutputPath(conf).toString();
+                /** HDFS paths always use '/' as the separator, regardless of the local platform */
+                String fileName = outputPath + "/part-" + partition;
+
+                tupleWriter = tupleWriterFactory.getTupleWriter(ctx);
+                try {
+                    FileSystem dfs = FileSystem.get(conf.getConfiguration());
+                    dos = dfs.create(new Path(fileName), true);
+                    tupleWriter.open(dos);
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            @Override
+            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+                accessor.reset(buffer);
+                int tupleCount = accessor.getTupleCount();
+                for (int i = 0; i < tupleCount; i++) {
+                    tuple.reset(accessor, i);
+                    tupleWriter.write(dos, tuple);
+                }
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+                try {
+                    tupleWriter.close(dos);
+                    dos.close();
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                } finally {
+                    Thread.currentThread().setContextClassLoader(ctxCL);
+                }
+            }
+
+        };
+    }
+}
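And the matching write side: each partition creates part-<partition> under the job's output path (the path and jobSpec are assumptions):

    Job conf = new Job();
    FileOutputFormat.setOutputPath(conf, new Path("hdfs://localhost:9000/output"));
    HDFSWriteOperatorDescriptor writer =
            new HDFSWriteOperatorDescriptor(jobSpec, conf, new TextTupleWriterFactory());
    // connect reader to writer with a OneToOneConnectorDescriptor, then run jobSpec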
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/scheduler/Scheduler.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/scheduler/Scheduler.java
new file mode 100644
index 0000000..cde187d
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/scheduler/Scheduler.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs2.scheduler;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.mapreduce.InputSplit;
+
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.hdfs.api.INcCollectionBuilder;
+
+/**
+ * The scheduler conducts data-local scheduling for data reading on HDFS.
+ * This class works with the Hadoop new API.
+ */
+@SuppressWarnings("deprecation")
+public class Scheduler {
+
+    private edu.uci.ics.hyracks.hdfs.scheduler.Scheduler scheduler;
+
+    /**
+     * The constructor of the scheduler.
+     * 
+     * @param ipAddress
+     *            the IP address of the Hyracks cluster controller
+     * @param port
+     *            the port of the Hyracks cluster controller
+     * @throws HyracksException
+     */
+    public Scheduler(String ipAddress, int port) throws HyracksException {
+        scheduler = new edu.uci.ics.hyracks.hdfs.scheduler.Scheduler(ipAddress, port);
+    }
+
+    /**
+     * The constructor of the scheduler.
+     * 
+     * @param ncNameToNcInfos
+     *            the mapping from nc names to nc infos
+     * @throws HyracksException
+     */
+    public Scheduler(Map<String, NodeControllerInfo> ncNameToNcInfos) throws HyracksException {
+        scheduler = new edu.uci.ics.hyracks.hdfs.scheduler.Scheduler(ncNameToNcInfos);
+    }
+
+    /**
+     * The constructor of the scheduler.
+     * 
+     * @param ncNameToNcInfos
+     *            the mapping from nc names to nc infos
+     * @param builder
+     *            the NC collection builder to use
+     * @throws HyracksException
+     */
+    public Scheduler(Map<String, NodeControllerInfo> ncNameToNcInfos, INcCollectionBuilder builder) throws HyracksException {
+        scheduler = new edu.uci.ics.hyracks.hdfs.scheduler.Scheduler(ncNameToNcInfos, builder);
+    }
+
+    /**
+     * Set location constraints for a file scan operator with a list of file splits
+     * 
+     * @throws HyracksDataException
+     */
+    public String[] getLocationConstraints(List<InputSplit> splits) throws HyracksException {
+        try {
+            org.apache.hadoop.mapred.InputSplit[] inputSplits = new org.apache.hadoop.mapred.InputSplit[splits.size()];
+            for (int i = 0; i < inputSplits.length; i++)
+                inputSplits[i] = new WrappedFileSplit(splits.get(i).getLocations(), splits.get(i).getLength());
+            return scheduler.getLocationConstraints(inputSplits);
+        } catch (Exception e) {
+            throw new HyracksException(e);
+        }
+    }
+}
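The new-API scheduler is thus a thin adapter: each mapreduce.InputSplit is reduced to its locations and length, wrapped as an old-API split, and delegated to the hdfs scheduler above. In isolation (conf and ncNameToNcInfos as in the sketches above):

    List<InputSplit> newApiSplits = new TextInputFormat().getSplits(conf);
    String[] constraints = new Scheduler(ncNameToNcInfos).getLocationConstraints(newApiSplits);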
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/scheduler/WrappedFileSplit.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/scheduler/WrappedFileSplit.java
new file mode 100644
index 0000000..1deb469
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/scheduler/WrappedFileSplit.java
@@ -0,0 +1,51 @@
+package edu.uci.ics.hyracks.hdfs2.scheduler;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.mapred.InputSplit;
+
+/**
+ * The wrapped implementation of InputSplit, for the new API scheduler
+ * to reuse the old API scheduler
+ */
+@SuppressWarnings("deprecation")
+public class WrappedFileSplit implements InputSplit {
+
+    private String[] locations;
+    private long length;
+
+    public WrappedFileSplit(String[] locations, long length) {
+        this.locations = locations;
+        this.length = length;
+    }
+
+    @Override
+    public void readFields(DataInput input) throws IOException {
+        int len = input.readInt();
+        locations = new String[len];
+        for (int i = 0; i < len; i++)
+            locations[i] = input.readUTF();
+        length = input.readLong();
+    }
+
+    @Override
+    public void write(DataOutput output) throws IOException {
+        output.writeInt(locations.length);
+        for (int i = 0; i < locations.length; i++)
+            output.writeUTF(locations[i]);
+        output.writeLong(length);
+    }
+
+    @Override
+    public long getLength() throws IOException {
+        return length;
+    }
+
+    @Override
+    public String[] getLocations() throws IOException {
+        return locations;
+    }
+
+}
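
A quick round-trip sketch (illustrative only, not part of the patch) of the Writable contract WrappedFileSplit must honor: write() and readFields() have to mirror each other exactly, which is why the length prefix is written with writeInt() to pair with the readInt() above.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import edu.uci.ics.hyracks.hdfs2.scheduler.WrappedFileSplit;

public class WrappedFileSplitRoundTrip {
    public static void main(String[] args) throws Exception {
        WrappedFileSplit original = new WrappedFileSplit(new String[] { "10.0.0.1", "10.0.0.2" }, 1024L);

        // Serialize with write(), then rehydrate a blank instance with readFields().
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));
        WrappedFileSplit copy = new WrappedFileSplit(null, 0);
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(copy.getLength() == original.getLength()
                && copy.getLocations().length == original.getLocations().length);
    }
}
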
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/dataflow/DataflowTest.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/dataflow/DataflowTest.java
new file mode 100644
index 0000000..2686077
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/dataflow/DataflowTest.java
@@ -0,0 +1,204 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.dataflow;
+
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.TextInputFormat;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.hdfs.lib.RawBinaryComparatorFactory;
+import edu.uci.ics.hyracks.hdfs.lib.RawBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.hdfs.lib.TextKeyValueParserFactory;
+import edu.uci.ics.hyracks.hdfs.lib.TextTupleWriterFactory;
+import edu.uci.ics.hyracks.hdfs.scheduler.Scheduler;
+import edu.uci.ics.hyracks.hdfs.utils.HyracksUtils;
+import edu.uci.ics.hyracks.hdfs.utils.TestUtils;
+
+/**
+ * Test the edu.uci.ics.hyracks.hdfs.dataflow package,
+ * the operators for the Hadoop old API.
+ */
+@SuppressWarnings({ "deprecation" })
+public class DataflowTest extends TestCase {
+
+    private static final String ACTUAL_RESULT_DIR = "actual";
+    private static final String EXPECTED_RESULT_PATH = "src/test/resources/expected";
+    private static final String PATH_TO_HADOOP_CONF = "src/test/resources/hadoop/conf";
+
+    private static final String DATA_PATH = "src/test/resources/data/customer.tbl";
+    private static final String HDFS_INPUT_PATH = "/customer/";
+    private static final String HDFS_OUTPUT_PATH = "/customer_result/";
+
+    private static final String HYRACKS_APP_NAME = "DataflowTest";
+    private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR + File.separator + "conf.xml";
+    private MiniDFSCluster dfsCluster;
+
+    private JobConf conf = new JobConf();
+    private int numberOfNC = 2;
+
+    @Override
+    public void setUp() throws Exception {
+        cleanupStores();
+        HyracksUtils.init();
+        HyracksUtils.createApp(HYRACKS_APP_NAME);
+        FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
+        FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
+        startHDFS();
+    }
+
+    private void cleanupStores() throws IOException {
+        FileUtils.forceMkdir(new File("teststore"));
+        FileUtils.forceMkdir(new File("build"));
+        FileUtils.cleanDirectory(new File("teststore"));
+        FileUtils.cleanDirectory(new File("build"));
+    }
+
+    /**
+     * Start the HDFS cluster and setup the data files
+     * 
+     * @throws IOException
+     */
+    private void startHDFS() throws IOException {
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
+
+        FileSystem lfs = FileSystem.getLocal(new Configuration());
+        lfs.delete(new Path("build"), true);
+        System.setProperty("hadoop.log.dir", "logs");
+        dfsCluster = new MiniDFSCluster(conf, numberOfNC, true, null);
+        FileSystem dfs = FileSystem.get(conf);
+        Path src = new Path(DATA_PATH);
+        Path dest = new Path(HDFS_INPUT_PATH);
+        Path result = new Path(HDFS_OUTPUT_PATH);
+        dfs.mkdirs(dest);
+        dfs.mkdirs(result);
+        dfs.copyFromLocalFile(src, dest);
+
+        DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
+        conf.writeXml(confOutput);
+        confOutput.flush();
+        confOutput.close();
+    }
+
+    /**
+     * Test a job with only HDFS reads and writes.
+     * 
+     * @throws Exception
+     */
+    public void testHDFSReadWriteOperators() throws Exception {
+        FileInputFormat.setInputPaths(conf, HDFS_INPUT_PATH);
+        FileOutputFormat.setOutputPath(conf, new Path(HDFS_OUTPUT_PATH));
+        conf.setInputFormat(TextInputFormat.class);
+
+        Scheduler scheduler = new Scheduler(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
+        InputSplit[] splits = conf.getInputFormat().getSplits(conf, numberOfNC * 4);
+
+        String[] readSchedule = scheduler.getLocationConstraints(splits);
+        JobSpecification jobSpec = new JobSpecification();
+        RecordDescriptor recordDesc = new RecordDescriptor(
+                new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
+
+        String[] locations = new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID,
+                HyracksUtils.NC2_ID };
+        HDFSReadOperatorDescriptor readOperator = new HDFSReadOperatorDescriptor(jobSpec, recordDesc, conf, splits,
+                readSchedule, new TextKeyValueParserFactory());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, readOperator, locations);
+
+        ExternalSortOperatorDescriptor sortOperator = new ExternalSortOperatorDescriptor(jobSpec, 10, new int[] { 0 },
+                new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, recordDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, sortOperator, locations);
+
+        HDFSWriteOperatorDescriptor writeOperator = new HDFSWriteOperatorDescriptor(jobSpec, conf,
+                new TextTupleWriterFactory());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, writeOperator, HyracksUtils.NC1_ID);
+
+        jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), readOperator, 0, sortOperator, 0);
+        jobSpec.connect(new MToNPartitioningMergingConnectorDescriptor(jobSpec, new FieldHashPartitionComputerFactory(
+                new int[] { 0 }, new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
+                new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }), sortOperator,
+                0, writeOperator, 0);
+        jobSpec.addRoot(writeOperator);
+
+        IHyracksClientConnection client = new HyracksConnection(HyracksUtils.CC_HOST,
+                HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
+        JobId jobId = client.startJob(HYRACKS_APP_NAME, jobSpec);
+        client.waitForCompletion(jobId);
+
+        Assert.assertEquals(true, checkResults());
+    }
+
+    /**
+     * Check if the results are correct
+     * 
+     * @return true if correct
+     * @throws Exception
+     */
+    private boolean checkResults() throws Exception {
+        FileSystem dfs = FileSystem.get(conf);
+        Path result = new Path(HDFS_OUTPUT_PATH);
+        Path actual = new Path(ACTUAL_RESULT_DIR);
+        dfs.copyToLocalFile(result, actual);
+
+        TestUtils.compareWithResult(new File(EXPECTED_RESULT_PATH + File.separator + "part-0"), new File(
+                ACTUAL_RESULT_DIR + File.separator + "customer_result" + File.separator + "part-0"));
+        return true;
+    }
+
+    /**
+     * Clean up the HDFS cluster
+     */
+    private void cleanupHDFS() throws Exception {
+        dfsCluster.shutdown();
+    }
+
+    @Override
+    public void tearDown() throws Exception {
+        HyracksUtils.destroyApp(HYRACKS_APP_NAME);
+        HyracksUtils.deinit();
+        cleanupHDFS();
+    }
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/scheduler/SchedulerTest.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/scheduler/SchedulerTest.java
new file mode 100644
index 0000000..1f394a9
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/scheduler/SchedulerTest.java
@@ -0,0 +1,341 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.scheduler;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.util.HashMap;
+import java.util.Map;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.InputSplit;
+import org.xml.sax.InputSource;
+import org.xml.sax.SAXException;
+
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.client.NodeStatus;
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+import edu.uci.ics.hyracks.api.topology.ClusterTopology;
+import edu.uci.ics.hyracks.api.topology.TopologyDefinitionParser;
+
+@SuppressWarnings("deprecation")
+public class SchedulerTest extends TestCase {
+    private static String TOPOLOGY_PATH = "src/test/resources/topology.xml";
+
+    private ClusterTopology parseTopology() throws IOException, SAXException {
+        FileReader fr = new FileReader(TOPOLOGY_PATH);
+        InputSource in = new InputSource(fr);
+        try {
+            return TopologyDefinitionParser.parse(in);
+        } finally {
+            fr.close();
+        }
+    }
+
+    /**
+     * Test the scheduler for the case when the Hyracks cluster is the HDFS cluster
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerSimple() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = new HashMap<String, NodeControllerInfo>();
+        ncNameToNcInfos.put("nc1", new NodeControllerInfo("nc1", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.1").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.1")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc2", new NodeControllerInfo("nc2", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.2").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.2")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc3", new NodeControllerInfo("nc3", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.3").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.3")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc4", new NodeControllerInfo("nc4", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.4").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.4")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc5", new NodeControllerInfo("nc5", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.5").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.5")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc6", new NodeControllerInfo("nc6", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.6").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.6")
+                .getAddress(), 5098)));
+
+        InputSplit[] fileSplits = new InputSplit[6];
+        fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
+        fileSplits[1] = new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[2] = new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" });
+        fileSplits[3] = new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" });
+        fileSplits[4] = new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[5] = new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
+
+        String[] expectedResults = new String[] { "nc1", "nc4", "nc6", "nc2", "nc3", "nc5" };
+
+        Scheduler scheduler = new Scheduler(ncNameToNcInfos);
+        String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+
+        ClusterTopology topology = parseTopology();
+        scheduler = new Scheduler(ncNameToNcInfos, topology);
+        locationConstraints = scheduler.getLocationConstraints(fileSplits);
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+    }
+
+    /**
+     * Test the case where the HDFS cluster is larger than the Hyracks cluster
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerLargerHDFS() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = new HashMap<String, NodeControllerInfo>();
+        ncNameToNcInfos.put("nc1", new NodeControllerInfo("nc1", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.1").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.1")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc2", new NodeControllerInfo("nc2", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.2").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.2")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc3", new NodeControllerInfo("nc3", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.3").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.3")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc4", new NodeControllerInfo("nc4", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.4").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.4")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc7", new NodeControllerInfo("nc7", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.7").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.5")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc12", new NodeControllerInfo("nc12", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.12").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.5")
+                .getAddress(), 5098)));
+
+        InputSplit[] fileSplits = new InputSplit[12];
+        fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
+        fileSplits[1] = new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[2] = new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" });
+        fileSplits[3] = new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" });
+        fileSplits[4] = new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[5] = new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
+        fileSplits[6] = new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
+        fileSplits[7] = new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[8] = new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.14", "10.0.0.11", "10.0.0.13" });
+        fileSplits[9] = new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" });
+        fileSplits[10] = new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.7" });
+        fileSplits[11] = new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" });
+
+        Scheduler scheduler = new Scheduler(ncNameToNcInfos);
+        String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
+
+        String[] expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc12",
+                "nc7", "nc7", "nc12" };
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+
+        expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc7", "nc12", "nc7",
+                "nc12" };
+        ClusterTopology topology = parseTopology();
+        scheduler = new Scheduler(ncNameToNcInfos, topology);
+        locationConstraints = scheduler.getLocationConstraints(fileSplits);
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+    }
+
+    /**
+     * Test the case where the HDFS cluster is smaller than the Hyracks cluster
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerSmallerHDFS() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = new HashMap<String, NodeControllerInfo>();
+        ncNameToNcInfos.put("nc1", new NodeControllerInfo("nc1", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.1").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.1")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc2", new NodeControllerInfo("nc2", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.2").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.2")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc3", new NodeControllerInfo("nc3", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.3").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.3")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc4", new NodeControllerInfo("nc4", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.4").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.4")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc5", new NodeControllerInfo("nc5", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.5").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.5")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc6", new NodeControllerInfo("nc6", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.6").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.6")
+                .getAddress(), 5098)));
+
+        InputSplit[] fileSplits = new InputSplit[12];
+        fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
+        fileSplits[1] = new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[2] = new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.3" });
+        fileSplits[3] = new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.3" });
+        fileSplits[4] = new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[5] = new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
+        fileSplits[6] = new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
+        fileSplits[7] = new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[8] = new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.1" });
+        fileSplits[9] = new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.2" });
+        fileSplits[10] = new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[11] = new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
+
+        String[] expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc5", "nc6",
+                "nc5", "nc6" };
+
+        Scheduler scheduler = new Scheduler(ncNameToNcInfos);
+        String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
+
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+
+        ClusterTopology topology = parseTopology();
+        scheduler = new Scheduler(ncNameToNcInfos, topology);
+        locationConstraints = scheduler.getLocationConstraints(fileSplits);
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+    }
+
+    /**
+     * Test the case where the HDFS cluster is smaller than the Hyracks cluster, with an odd number of splits
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerSmallerHDFSOdd() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = new HashMap<String, NodeControllerInfo>();
+        ncNameToNcInfos.put("nc1", new NodeControllerInfo("nc1", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.1").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.1")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc2", new NodeControllerInfo("nc2", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.2").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.2")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc3", new NodeControllerInfo("nc3", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.3").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.3")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc4", new NodeControllerInfo("nc4", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.4").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.4")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc5", new NodeControllerInfo("nc5", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.5").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.5")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc6", new NodeControllerInfo("nc6", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.6").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.6")
+                .getAddress(), 5098)));
+
+        InputSplit[] fileSplits = new InputSplit[13];
+        fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
+        fileSplits[1] = new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[2] = new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.3" });
+        fileSplits[3] = new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.3" });
+        fileSplits[4] = new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[5] = new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
+        fileSplits[6] = new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
+        fileSplits[7] = new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[8] = new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.1" });
+        fileSplits[9] = new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.2" });
+        fileSplits[10] = new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
+        fileSplits[11] = new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
+        fileSplits[12] = new FileSplit(new Path("part-13"), 0, 0, new String[] { "10.0.0.2", "10.0.0.4", "10.0.0.5" });
+
+        String[] expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc5", "nc1",
+                "nc5", "nc2", "nc4" };
+
+        Scheduler scheduler = new Scheduler(ncNameToNcInfos);
+        String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
+
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+
+        ClusterTopology topology = parseTopology();
+        scheduler = new Scheduler(ncNameToNcInfos, topology);
+        locationConstraints = scheduler.getLocationConstraints(fileSplits);
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+
+    }
+
+    /**
+     * Test boundary cases where the splits array is empty or null
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerBoundary() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = new HashMap<String, NodeControllerInfo>();
+        ncNameToNcInfos.put("nc1", new NodeControllerInfo("nc1", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.1").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.1")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc2", new NodeControllerInfo("nc2", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.2").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.2")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc3", new NodeControllerInfo("nc3", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.3").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.3")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc4", new NodeControllerInfo("nc4", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.4").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.4")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc5", new NodeControllerInfo("nc5", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.5").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.5")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc6", new NodeControllerInfo("nc6", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.6").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.6")
+                .getAddress(), 5098)));
+
+        /** test empty file splits */
+        InputSplit[] fileSplits = new InputSplit[0];
+        String[] expectedResults = new String[] {};
+
+        Scheduler scheduler = new Scheduler(ncNameToNcInfos);
+        String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
+
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+
+        ClusterTopology topology = parseTopology();
+        scheduler = new Scheduler(ncNameToNcInfos, topology);
+        locationConstraints = scheduler.getLocationConstraints(fileSplits);
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+
+        fileSplits = null;
+        expectedResults = new String[] {};
+
+        scheduler = new Scheduler(ncNameToNcInfos);
+        locationConstraints = scheduler.getLocationConstraints(fileSplits);
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+
+        scheduler = new Scheduler(ncNameToNcInfos, topology);
+        locationConstraints = scheduler.getLocationConstraints(fileSplits);
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+
+    }
+
+}
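
The fixtures above repeat the same six-line NodeControllerInfo wiring in every test. A small helper along these lines (hypothetical, not part of the patch) would keep them terse; the 10.0.0.x addresses and ports 5099/5098 mirror the tests.

import java.net.InetAddress;
import java.util.HashMap;
import java.util.Map;

import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
import edu.uci.ics.hyracks.api.client.NodeStatus;
import edu.uci.ics.hyracks.api.comm.NetworkAddress;

public class NcFixtures {
    /** Builds ncNameToNcInfos for nc1..ncN on 10.0.0.1..10.0.0.N. */
    public static Map<String, NodeControllerInfo> cluster(int size) throws Exception {
        Map<String, NodeControllerInfo> ncs = new HashMap<String, NodeControllerInfo>();
        for (int i = 1; i <= size; i++) {
            byte[] ip = InetAddress.getByName("10.0.0." + i).getAddress();
            ncs.put("nc" + i, new NodeControllerInfo("nc" + i, NodeStatus.ALIVE,
                    new NetworkAddress(ip, 5099), new NetworkAddress(ip, 5098)));
        }
        return ncs;
    }
}
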
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/utils/HyracksUtils.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/utils/HyracksUtils.java
new file mode 100644
index 0000000..8c12518
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/utils/HyracksUtils.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.utils;
+
+import java.util.EnumSet;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.job.JobFlag;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
+import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
+import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+
+public class HyracksUtils {
+
+    public static final String NC1_ID = "nc1";
+    public static final String NC2_ID = "nc2";
+
+    public static final int DEFAULT_HYRACKS_CC_PORT = 1099;
+    public static final int TEST_HYRACKS_CC_PORT = 1099;
+    public static final int TEST_HYRACKS_CC_CLIENT_PORT = 2099;
+    public static final String CC_HOST = "localhost";
+
+    public static final int FRAME_SIZE = 65536;
+
+    private static ClusterControllerService cc;
+    private static NodeControllerService nc1;
+    private static NodeControllerService nc2;
+    private static IHyracksClientConnection hcc;
+
+    public static void init() throws Exception {
+        CCConfig ccConfig = new CCConfig();
+        ccConfig.clientNetIpAddress = CC_HOST;
+        ccConfig.clusterNetIpAddress = CC_HOST;
+        ccConfig.clusterNetPort = TEST_HYRACKS_CC_PORT;
+        ccConfig.clientNetPort = TEST_HYRACKS_CC_CLIENT_PORT;
+        ccConfig.defaultMaxJobAttempts = 0;
+        ccConfig.jobHistorySize = 0;
+        ccConfig.profileDumpPeriod = -1;
+
+        // cluster controller
+        cc = new ClusterControllerService(ccConfig);
+        cc.start();
+
+        // two node controllers
+        NCConfig ncConfig1 = new NCConfig();
+        ncConfig1.ccHost = "localhost";
+        ncConfig1.clusterNetIPAddress = "localhost";
+        ncConfig1.ccPort = TEST_HYRACKS_CC_PORT;
+        ncConfig1.dataIPAddress = "127.0.0.1";
+        ncConfig1.datasetIPAddress = "127.0.0.1";
+        ncConfig1.nodeId = NC1_ID;
+        nc1 = new NodeControllerService(ncConfig1);
+        nc1.start();
+
+        NCConfig ncConfig2 = new NCConfig();
+        ncConfig2.ccHost = "localhost";
+        ncConfig2.clusterNetIPAddress = "localhost";
+        ncConfig2.ccPort = TEST_HYRACKS_CC_PORT;
+        ncConfig2.dataIPAddress = "127.0.0.1";
+        ncConfig2.datasetIPAddress = "127.0.0.1";
+        ncConfig2.nodeId = NC2_ID;
+        nc2 = new NodeControllerService(ncConfig2);
+        nc2.start();
+
+        // hyracks connection
+        hcc = new HyracksConnection(CC_HOST, TEST_HYRACKS_CC_CLIENT_PORT);
+    }
+
+    public static void destroyApp(String hyracksAppName) throws Exception {
+        hcc.destroyApplication(hyracksAppName);
+    }
+
+    public static void createApp(String hyracksAppName) throws Exception {
+        hcc.createApplication(hyracksAppName, null);
+    }
+
+    public static void deinit() throws Exception {
+        nc2.stop();
+        nc1.stop();
+        cc.stop();
+    }
+
+    public static void runJob(JobSpecification spec, String appName) throws Exception {
+        spec.setFrameSize(FRAME_SIZE);
+        JobId jobId = hcc.startJob(appName, spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
+        hcc.waitForCompletion(jobId);
+    }
+
+}
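
The intended call order for the harness above, as a minimal sketch: the empty JobSpecification and the "sketch-app" name are placeholders, and a real test would build operators and connectors before running the job.

import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.hdfs.utils.HyracksUtils;

public class HarnessLifecycleSketch {
    public static void main(String[] args) throws Exception {
        HyracksUtils.init();                  // start the CC and both NCs
        HyracksUtils.createApp("sketch-app"); // arbitrary application name
        try {
            JobSpecification spec = new JobSpecification(); // placeholder spec
            HyracksUtils.runJob(spec, "sketch-app");
        } finally {
            HyracksUtils.destroyApp("sketch-app");
            HyracksUtils.deinit();            // stop the NCs before the CC
        }
    }
}
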
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/utils/TestUtils.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/utils/TestUtils.java
new file mode 100644
index 0000000..3826688
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs/utils/TestUtils.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs.utils;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+
+public class TestUtils {
+
+    public static void compareWithResult(File expectedFile, File actualFile) throws Exception {
+        BufferedReader readerExpected = new BufferedReader(new FileReader(expectedFile));
+        BufferedReader readerActual = new BufferedReader(new FileReader(actualFile));
+        String lineExpected, lineActual;
+        int num = 1;
+        try {
+            while ((lineExpected = readerExpected.readLine()) != null) {
+                lineActual = readerActual.readLine();
+                // Assert.assertEquals(lineExpected, lineActual);
+                if (lineActual == null) {
+                    throw new Exception("Actual result changed at line " + num + ":\n< " + lineExpected + "\n> ");
+                }
+                if (!equalStrings(lineExpected, lineActual)) {
+                    throw new Exception("Result for changed at line " + num + ":\n< " + lineExpected + "\n> "
+                            + lineActual);
+                }
+                ++num;
+            }
+            lineActual = readerActual.readLine();
+            if (lineActual != null) {
+                throw new Exception("Actual result changed at line " + num + ":\n< \n> " + lineActual);
+            }
+        } finally {
+            readerExpected.close();
+            readerActual.close();
+        }
+    }
+
+    private static boolean equalStrings(String s1, String s2) {
+        String[] rowsOne = s1.split("\n");
+        String[] rowsTwo = s2.split("\n");
+
+        if (rowsOne.length != rowsTwo.length)
+            return false;
+
+        for (int i = 0; i < rowsOne.length; i++) {
+            String row1 = rowsOne[i];
+            String row2 = rowsTwo[i];
+
+            if (row1.equals(row2))
+                continue;
+
+            String[] fields1 = row1.split(",");
+            String[] fields2 = row2.split(",");
+
+            for (int j = 0; j < fields1.length; j++) {
+                if (fields1[j].equals(fields2[j])) {
+                    continue;
+                } else if (fields1[j].indexOf('.') < 0) {
+                    return false;
+                } else {
+                    fields1[j] = fields1[j].split("=")[1];
+                    fields2[j] = fields2[j].split("=")[1];
+                    Double double1 = Double.parseDouble(fields1[j]);
+                    Double double2 = Double.parseDouble(fields2[j]);
+                    float float1 = (float) double1.doubleValue();
+                    float float2 = (float) double2.doubleValue();
+
+                    if (Math.abs(float1 - float2) == 0)
+                        continue;
+                    else {
+                        return false;
+                    }
+                }
+            }
+        }
+        return true;
+    }
+
+}
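
To illustrate the tolerant comparison in equalStrings(): two rows that differ only in how a float after an '=' sign is rendered are treated as equal. The temp files and the field contents in this sketch are made up for illustration.

import java.io.File;
import java.io.PrintWriter;

import edu.uci.ics.hyracks.hdfs.utils.TestUtils;

public class TestUtilsSketch {
    public static void main(String[] args) throws Exception {
        File expected = File.createTempFile("expected", ".txt");
        File actual = File.createTempFile("actual", ".txt");

        PrintWriter w1 = new PrintWriter(expected);
        w1.println("c1,score=0.5");
        w1.close();
        PrintWriter w2 = new PrintWriter(actual);
        w2.println("c1,score=0.50"); // same value, different rendering
        w2.close();

        // Passes: the fields match numerically; a real mismatch would throw.
        TestUtils.compareWithResult(expected, actual);
        System.out.println("match");
    }
}
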
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs2/dataflow/DataflowTest.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs2/dataflow/DataflowTest.java
new file mode 100644
index 0000000..9f77979
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs2/dataflow/DataflowTest.java
@@ -0,0 +1,211 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs2.dataflow;
+
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.List;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.hdfs.MiniDFSClusterFactory;
+import edu.uci.ics.hyracks.hdfs.lib.RawBinaryComparatorFactory;
+import edu.uci.ics.hyracks.hdfs.lib.RawBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.hdfs.lib.TextKeyValueParserFactory;
+import edu.uci.ics.hyracks.hdfs.lib.TextTupleWriterFactory;
+import edu.uci.ics.hyracks.hdfs.utils.HyracksUtils;
+import edu.uci.ics.hyracks.hdfs.utils.TestUtils;
+import edu.uci.ics.hyracks.hdfs2.scheduler.Scheduler;
+
+/**
+ * Test the edu.uci.ics.hyracks.hdfs2.dataflow package,
+ * the operators for the Hadoop new API.
+ */
+public class DataflowTest extends TestCase {
+
+    private static final String ACTUAL_RESULT_DIR = "actual";
+    private static final String EXPECTED_RESULT_PATH = "src/test/resources/expected";
+    private static final String PATH_TO_HADOOP_CONF = "src/test/resources/hadoop/conf";
+
+    private static final String DATA_PATH = "src/test/resources/data/customer.tbl";
+    private static final String HDFS_INPUT_PATH = "/customer/";
+    private static final String HDFS_OUTPUT_PATH = "/customer_result/";
+
+    private static final String HYRACKS_APP_NAME = "DataflowTest";
+    private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR + File.separator + "conf.xml";
+    private MiniDFSCluster dfsCluster;
+    private MiniDFSClusterFactory dfsClusterFactory = new MiniDFSClusterFactory();
+
+    private Job conf;
+    private int numberOfNC = 2;
+
+    @Override
+    public void setUp() throws Exception {
+        conf = new Job();
+        cleanupStores();
+        HyracksUtils.init();
+        HyracksUtils.createApp(HYRACKS_APP_NAME);
+        FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
+        FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
+        startHDFS();
+    }
+
+    private void cleanupStores() throws IOException {
+        FileUtils.forceMkdir(new File("teststore"));
+        FileUtils.forceMkdir(new File("build"));
+        FileUtils.cleanDirectory(new File("teststore"));
+        FileUtils.cleanDirectory(new File("build"));
+    }
+
+    /**
+     * Start the HDFS cluster and setup the data files
+     * 
+     * @throws IOException
+     */
+    private void startHDFS() throws IOException {
+        conf.getConfiguration().addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
+        conf.getConfiguration().addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
+        conf.getConfiguration().addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
+
+        FileSystem lfs = FileSystem.getLocal(new Configuration());
+        lfs.delete(new Path("build"), true);
+        System.setProperty("hadoop.log.dir", "logs");
+        dfsCluster = dfsClusterFactory.getMiniDFSCluster(conf.getConfiguration(), numberOfNC);
+        FileSystem dfs = FileSystem.get(conf.getConfiguration());
+        Path src = new Path(DATA_PATH);
+        Path dest = new Path(HDFS_INPUT_PATH);
+        Path result = new Path(HDFS_OUTPUT_PATH);
+        dfs.mkdirs(dest);
+        dfs.mkdirs(result);
+        dfs.copyFromLocalFile(src, dest);
+
+        DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
+        conf.getConfiguration().writeXml(confOutput);
+        confOutput.flush();
+        confOutput.close();
+    }
+
+    /**
+     * Test a job with only HDFS reads and writes.
+     * 
+     * @throws Exception
+     */
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public void testHDFSReadWriteOperators() throws Exception {
+        FileInputFormat.setInputPaths(conf, HDFS_INPUT_PATH);
+        FileOutputFormat.setOutputPath(conf, new Path(HDFS_OUTPUT_PATH));
+        conf.setInputFormatClass(TextInputFormat.class);
+
+        Scheduler scheduler = new Scheduler(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
+        InputFormat inputFormat = ReflectionUtils.newInstance(conf.getInputFormatClass(), conf.getConfiguration());
+        List<InputSplit> splits = inputFormat.getSplits(conf);
+
+        String[] readSchedule = scheduler.getLocationConstraints(splits);
+        JobSpecification jobSpec = new JobSpecification();
+        RecordDescriptor recordDesc = new RecordDescriptor(
+                new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
+
+        String[] locations = new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID,
+                HyracksUtils.NC2_ID };
+        HDFSReadOperatorDescriptor readOperator = new HDFSReadOperatorDescriptor(jobSpec, recordDesc, conf, splits,
+                readSchedule, new TextKeyValueParserFactory());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, readOperator, locations);
+
+        ExternalSortOperatorDescriptor sortOperator = new ExternalSortOperatorDescriptor(jobSpec, 10, new int[] { 0 },
+                new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, recordDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, sortOperator, locations);
+
+        HDFSWriteOperatorDescriptor writeOperator = new HDFSWriteOperatorDescriptor(jobSpec, conf,
+                new TextTupleWriterFactory());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, writeOperator, HyracksUtils.NC1_ID);
+
+        jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), readOperator, 0, sortOperator, 0);
+        jobSpec.connect(new MToNPartitioningMergingConnectorDescriptor(jobSpec, new FieldHashPartitionComputerFactory(
+                new int[] { 0 }, new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
+                new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }), sortOperator,
+                0, writeOperator, 0);
+        jobSpec.addRoot(writeOperator);
+
+        IHyracksClientConnection client = new HyracksConnection(HyracksUtils.CC_HOST,
+                HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
+        JobId jobId = client.startJob(HYRACKS_APP_NAME, jobSpec);
+        client.waitForCompletion(jobId);
+
+        Assert.assertEquals(true, checkResults());
+    }
+
+    /**
+     * Check if the results are correct
+     * 
+     * @return true if correct
+     * @throws Exception
+     */
+    private boolean checkResults() throws Exception {
+        FileSystem dfs = FileSystem.get(conf.getConfiguration());
+        Path result = new Path(HDFS_OUTPUT_PATH);
+        Path actual = new Path(ACTUAL_RESULT_DIR);
+        dfs.copyToLocalFile(result, actual);
+
+        TestUtils.compareWithResult(new File(EXPECTED_RESULT_PATH + File.separator + "part-0"), new File(
+                ACTUAL_RESULT_DIR + File.separator + "customer_result" + File.separator + "part-0"));
+        return true;
+    }
+
+    /**
+     * Clean up the HDFS cluster
+     */
+    private void cleanupHDFS() throws Exception {
+        dfsCluster.shutdown();
+    }
+
+    @Override
+    public void tearDown() throws Exception {
+        HyracksUtils.destroyApp(HYRACKS_APP_NAME);
+        HyracksUtils.deinit();
+        cleanupHDFS();
+    }
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs2/scheduler/SchedulerTest.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs2/scheduler/SchedulerTest.java
new file mode 100644
index 0000000..442aeae0
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/edu/uci/ics/hyracks/hdfs2/scheduler/SchedulerTest.java
@@ -0,0 +1,238 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.hdfs2.scheduler;
+
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.client.NodeStatus;
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+
+/**
+ * Test case for the new HDFS API scheduler
+ */
+public class SchedulerTest extends TestCase {
+
+    /**
+     * Test the scheduler for the case when the Hyracks cluster is the HDFS cluster
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerSimple() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = new HashMap<String, NodeControllerInfo>();
+        ncNameToNcInfos.put("nc1", new NodeControllerInfo("nc1", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.1").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.1")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc2", new NodeControllerInfo("nc2", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.2").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.2")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc3", new NodeControllerInfo("nc3", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.3").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.3")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc4", new NodeControllerInfo("nc4", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.4").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.4")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc5", new NodeControllerInfo("nc5", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.5").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.5")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc6", new NodeControllerInfo("nc6", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.6").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.6")
+                .getAddress(), 5098)));
+
+        List<InputSplit> fileSplits = new ArrayList<InputSplit>();
+        fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" }));
+        fileSplits.add(new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" }));
+        fileSplits.add(new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" }));
+
+        Scheduler scheduler = new Scheduler(ncNameToNcInfos);
+        String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
+
+        String[] expectedResults = new String[] { "nc1", "nc4", "nc6", "nc2", "nc3", "nc5" };
+
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(locationConstraints[i], expectedResults[i]);
+        }
+    }
+
+    /**
+     * Test the case where the HDFS cluster is larger than the Hyracks cluster
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerLargerHDFS() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = new HashMap<String, NodeControllerInfo>();
+        ncNameToNcInfos.put("nc1", new NodeControllerInfo("nc1", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.1").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.1")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc2", new NodeControllerInfo("nc2", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.2").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.2")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc3", new NodeControllerInfo("nc3", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.3").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.3")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc4", new NodeControllerInfo("nc4", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.4").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.4")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc5", new NodeControllerInfo("nc5", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.5").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.5")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc6", new NodeControllerInfo("nc6", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.6").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.6")
+                .getAddress(), 5098)));
+
+        List<InputSplit> fileSplits = new ArrayList<InputSplit>();
+        fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" }));
+        fileSplits.add(new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" }));
+        fileSplits.add(new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" }));
+        fileSplits.add(new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" }));
+        fileSplits.add(new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.7" }));
+        fileSplits.add(new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" }));
+
+        Scheduler scheduler = new Scheduler(ncNameToNcInfos);
+        String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
+
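+        // Twelve splits over six NCs should balance at two splits per NC; one
+        // non-local assignment (part-10 to nc5) is accepted to keep the load even.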
+        String[] expectedResults = new String[] { "nc1", "nc4", "nc6", "nc1", "nc4", "nc2", "nc2", "nc3", "nc6", "nc5",
+                "nc3", "nc5" };
+
+        Assert.assertEquals(expectedResults.length, locationConstraints.length);
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(expectedResults[i], locationConstraints[i]);
+        }
+    }
+
+    /**
+     * Test the case where the HDFS cluster is smaller than the Hyracks cluster:
+     * block replicas live on only five of the six node controller hosts.
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerSmallerHDFS() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = new HashMap<String, NodeControllerInfo>();
+        ncNameToNcInfos.put("nc1", new NodeControllerInfo("nc1", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.1").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.1")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc2", new NodeControllerInfo("nc2", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.2").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.2")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc3", new NodeControllerInfo("nc3", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.3").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.3")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc4", new NodeControllerInfo("nc4", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.4").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.4")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc5", new NodeControllerInfo("nc5", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.5").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.5")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc6", new NodeControllerInfo("nc6", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.6").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.6")
+                .getAddress(), 5098)));
+
+        List<InputSplit> fileSplits = new ArrayList<InputSplit>();
+        fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.1" }));
+        fileSplits.add(new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.2" }));
+        fileSplits.add(new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" }));
+
+        Scheduler scheduler = new Scheduler(ncNameToNcInfos);
+        String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
+
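+        // No replica lives on 10.0.0.6, so nc6 can only take non-local splits;
+        // the schedule still balances at two splits per NC.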
+        String[] expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc5", "nc6",
+                "nc5", "nc6" };
+
+        Assert.assertEquals(expectedResults.length, locationConstraints.length);
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(expectedResults[i], locationConstraints[i]);
+        }
+    }
+
+    /**
+     * Test the case where the HDFS cluster is smaller than the Hyracks cluster
+     * and the number of splits (13) is odd.
+     * 
+     * @throws Exception
+     */
+    public void testSchedulerSmallerHDFSOdd() throws Exception {
+        Map<String, NodeControllerInfo> ncNameToNcInfos = new HashMap<String, NodeControllerInfo>();
+        ncNameToNcInfos.put("nc1", new NodeControllerInfo("nc1", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.1").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.1")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc2", new NodeControllerInfo("nc2", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.2").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.2")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc3", new NodeControllerInfo("nc3", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.3").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.3")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc4", new NodeControllerInfo("nc4", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.4").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.4")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc5", new NodeControllerInfo("nc5", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.5").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.5")
+                .getAddress(), 5098)));
+        ncNameToNcInfos.put("nc6", new NodeControllerInfo("nc6", NodeStatus.ALIVE, new NetworkAddress(InetAddress
+                .getByName("10.0.0.6").getAddress(), 5099), new NetworkAddress(InetAddress.getByName("10.0.0.6")
+                .getAddress(), 5098)));
+
+        List<InputSplit> fileSplits = new ArrayList<InputSplit>();
+        fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-2"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-3"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-4"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-5"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
+        fileSplits.add(new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.1" }));
+        fileSplits.add(new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.2" }));
+        fileSplits.add(new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" }));
+        fileSplits.add(new FileSplit(new Path("part-13"), 0, 0, new String[] { "10.0.0.2", "10.0.0.4", "10.0.0.5" }));
+
+        Scheduler scheduler = new Scheduler(ncNameToNcInfos);
+        String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
+
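+        // Thirteen splits (an odd count) with no replicas on 10.0.0.6: the expected
+        // schedule is uneven, and nc6 receives no split at all.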
+        String[] expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc5", "nc1",
+                "nc5", "nc2", "nc4" };
+
+        Assert.assertEquals(expectedResults.length, locationConstraints.length);
+        for (int i = 0; i < locationConstraints.length; i++) {
+            Assert.assertEquals(expectedResults[i], locationConstraints[i]);
+        }
+    }
+
+}
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/data/customer.tbl b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/data/customer.tbl
new file mode 100644
index 0000000..5d39c80
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/data/customer.tbl
@@ -0,0 +1,150 @@
+1|Customer#000000001|IVhzIApeRb ot,c,E|15|25-989-741-2988|711.56|BUILDING|to the even, regular platelets. regular, ironic epitaphs nag e|
+2|Customer#000000002|XSTf4,NCwDVaWNe6tEgvwfmRchLXak|13|23-768-687-3665|121.65|AUTOMOBILE|l accounts. blithely ironic theodolites integrate boldly: caref|
+3|Customer#000000003|MG9kdTD2WBHm|1|11-719-748-3364|7498.12|AUTOMOBILE| deposits eat slyly ironic, even instructions. express foxes detect slyly. blithely even accounts abov|
+4|Customer#000000004|XxVSJsLAGtn|4|14-128-190-5944|2866.83|MACHINERY| requests. final, regular ideas sleep final accou|
+5|Customer#000000005|KvpyuHCplrB84WgAiGV6sYpZq7Tj|3|13-750-942-6364|794.47|HOUSEHOLD|n accounts will have to unwind. foxes cajole accor|
+6|Customer#000000006|sKZz0CsnMD7mp4Xd0YrBvx,LREYKUWAh yVn|20|30-114-968-4951|7638.57|AUTOMOBILE|tions. even deposits boost according to the slyly bold packages. final accounts cajole requests. furious|
+7|Customer#000000007|TcGe5gaZNgVePxU5kRrvXBfkasDTea|18|28-190-982-9759|9561.95|AUTOMOBILE|ainst the ironic, express theodolites. express, even pinto beans among the exp|
+8|Customer#000000008|I0B10bB0AymmC, 0PrRYBCP1yGJ8xcBPmWhl5|17|27-147-574-9335|6819.74|BUILDING|among the slyly regular theodolites kindle blithely courts. carefully even theodolites haggle slyly along the ide|
+9|Customer#000000009|xKiAFTjUsCuxfeleNqefumTrjS|8|18-338-906-3675|8324.07|FURNITURE|r theodolites according to the requests wake thinly excuses: pending requests haggle furiousl|
+10|Customer#000000010|6LrEaV6KR6PLVcgl2ArL Q3rqzLzcT1 v2|5|15-741-346-9870|2753.54|HOUSEHOLD|es regular deposits haggle. fur|
+11|Customer#000000011|PkWS 3HlXqwTuzrKg633BEi|23|33-464-151-3439|-272.60|BUILDING|ckages. requests sleep slyly. quickly even pinto beans promise above the slyly regular pinto beans. |
+12|Customer#000000012|9PWKuhzT4Zr1Q|13|23-791-276-1263|3396.49|HOUSEHOLD| to the carefully final braids. blithely regular requests nag. ironic theodolites boost quickly along|
+13|Customer#000000013|nsXQu0oVjD7PM659uC3SRSp|3|13-761-547-5974|3857.34|BUILDING|ounts sleep carefully after the close frays. carefully bold notornis use ironic requests. blithely|
+14|Customer#000000014|KXkletMlL2JQEA |1|11-845-129-3851|5266.30|FURNITURE|, ironic packages across the unus|
+15|Customer#000000015|YtWggXoOLdwdo7b0y,BZaGUQMLJMX1Y,EC,6Dn|23|33-687-542-7601|2788.52|HOUSEHOLD| platelets. regular deposits detect asymptotes. blithely unusual packages nag slyly at the fluf|
+16|Customer#000000016|cYiaeMLZSMAOQ2 d0W,|10|20-781-609-3107|4681.03|FURNITURE|kly silent courts. thinly regular theodolites sleep fluffily after |
+17|Customer#000000017|izrh 6jdqtp2eqdtbkswDD8SG4SzXruMfIXyR7|2|12-970-682-3487|6.34|AUTOMOBILE|packages wake! blithely even pint|
+18|Customer#000000018|3txGO AiuFux3zT0Z9NYaFRnZt|6|16-155-215-1315|5494.43|BUILDING|s sleep. carefully even instructions nag furiously alongside of t|
+19|Customer#000000019|uc,3bHIx84H,wdrmLOjVsiqXCq2tr|18|28-396-526-5053|8914.71|HOUSEHOLD| nag. furiously careful packages are slyly at the accounts. furiously regular in|
+20|Customer#000000020|JrPk8Pqplj4Ne|22|32-957-234-8742|7603.40|FURNITURE|g alongside of the special excuses-- fluffily enticing packages wake |
+21|Customer#000000021|XYmVpr9yAHDEn|8|18-902-614-8344|1428.25|MACHINERY| quickly final accounts integrate blithely furiously u|
+22|Customer#000000022|QI6p41,FNs5k7RZoCCVPUTkUdYpB|3|13-806-545-9701|591.98|MACHINERY|s nod furiously above the furiously ironic ideas. |
+23|Customer#000000023|OdY W13N7Be3OC5MpgfmcYss0Wn6TKT|3|13-312-472-8245|3332.02|HOUSEHOLD|deposits. special deposits cajole slyly. fluffily special deposits about the furiously |
+24|Customer#000000024|HXAFgIAyjxtdqwimt13Y3OZO 4xeLe7U8PqG|13|23-127-851-8031|9255.67|MACHINERY|into beans. fluffily final ideas haggle fluffily|
+25|Customer#000000025|Hp8GyFQgGHFYSilH5tBfe|12|22-603-468-3533|7133.70|FURNITURE|y. accounts sleep ruthlessly according to the regular theodolites. unusual instructions sleep. ironic, final|
+26|Customer#000000026|8ljrc5ZeMl7UciP|22|32-363-455-4837|5182.05|AUTOMOBILE|c requests use furiously ironic requests. slyly ironic dependencies us|
+27|Customer#000000027|IS8GIyxpBrLpMT0u7|3|13-137-193-2709|5679.84|BUILDING| about the carefully ironic pinto beans. accoun|
+28|Customer#000000028|iVyg0daQ,Tha8x2WPWA9m2529m|8|18-774-241-1462|1007.18|FURNITURE| along the regular deposits. furiously final pac|
+29|Customer#000000029|sJ5adtfyAkCK63df2,vF25zyQMVYE34uh|0|10-773-203-7342|7618.27|FURNITURE|its after the carefully final platelets x-ray against |
+30|Customer#000000030|nJDsELGAavU63Jl0c5NKsKfL8rIJQQkQnYL2QJY|1|11-764-165-5076|9321.01|BUILDING|lithely final requests. furiously unusual account|
+31|Customer#000000031|LUACbO0viaAv6eXOAebryDB xjVst|23|33-197-837-7094|5236.89|HOUSEHOLD|s use among the blithely pending depo|
+32|Customer#000000032|jD2xZzi UmId,DCtNBLXKj9q0Tlp2iQ6ZcO3J|15|25-430-914-2194|3471.53|BUILDING|cial ideas. final, furious requests across the e|
+33|Customer#000000033|qFSlMuLucBmx9xnn5ib2csWUweg D|17|27-375-391-1280|-78.56|AUTOMOBILE|s. slyly regular accounts are furiously. carefully pending requests|
+34|Customer#000000034|Q6G9wZ6dnczmtOx509xgE,M2KV|15|25-344-968-5422|8589.70|HOUSEHOLD|nder against the even, pending accounts. even|
+35|Customer#000000035|TEjWGE4nBzJL2|17|27-566-888-7431|1228.24|HOUSEHOLD|requests. special, express requests nag slyly furiousl|
+36|Customer#000000036|3TvCzjuPzpJ0,DdJ8kW5U|21|31-704-669-5769|4987.27|BUILDING|haggle. enticing, quiet platelets grow quickly bold sheaves. carefully regular acc|
+37|Customer#000000037|7EV4Pwh,3SboctTWt|8|18-385-235-7162|-917.75|FURNITURE|ilent packages are carefully among the deposits. furiousl|
+38|Customer#000000038|a5Ee5e9568R8RLP 2ap7|12|22-306-880-7212|6345.11|HOUSEHOLD|lar excuses. closely even asymptotes cajole blithely excuses. carefully silent pinto beans sleep carefully fin|
+39|Customer#000000039|nnbRg,Pvy33dfkorYE FdeZ60|2|12-387-467-6509|6264.31|AUTOMOBILE|tions. slyly silent excuses slee|
+40|Customer#000000040|gOnGWAyhSV1ofv|3|13-652-915-8939|1335.30|BUILDING|rges impress after the slyly ironic courts. foxes are. blithely |
+41|Customer#000000041|IM9mzmyoxeBmvNw8lA7G3Ydska2nkZF|10|20-917-711-4011|270.95|HOUSEHOLD|ly regular accounts hang bold, silent packages. unusual foxes haggle slyly above the special, final depo|
+42|Customer#000000042|ziSrvyyBke|5|15-416-330-4175|8727.01|BUILDING|ssly according to the pinto beans: carefully special requests across the even, pending accounts wake special|
+43|Customer#000000043|ouSbjHk8lh5fKX3zGso3ZSIj9Aa3PoaFd|19|29-316-665-2897|9904.28|MACHINERY|ial requests: carefully pending foxes detect quickly. carefully final courts cajole quickly. carefully|
+44|Customer#000000044|Oi,dOSPwDu4jo4x,,P85E0dmhZGvNtBwi|16|26-190-260-5375|7315.94|AUTOMOBILE|r requests around the unusual, bold a|
+45|Customer#000000045|4v3OcpFgoOmMG,CbnF,4mdC|9|19-715-298-9917|9983.38|AUTOMOBILE|nto beans haggle slyly alongside of t|
+46|Customer#000000046|eaTXWWm10L9|6|16-357-681-2007|5744.59|AUTOMOBILE|ctions. accounts sleep furiously even requests. regular, regular accounts cajole blithely around the final pa|
+47|Customer#000000047|b0UgocSqEW5 gdVbhNT|2|12-427-271-9466|274.58|BUILDING|ions. express, ironic instructions sleep furiously ironic ideas. furi|
+48|Customer#000000048|0UU iPhBupFvemNB|0|10-508-348-5882|3792.50|BUILDING|re fluffily pending foxes. pending, bold platelets sleep slyly. even platelets cajo|
+49|Customer#000000049|cNgAeX7Fqrdf7HQN9EwjUa4nxT,68L FKAxzl|10|20-908-631-4424|4573.94|FURNITURE|nusual foxes! fluffily pending packages maintain to the regular |
+50|Customer#000000050|9SzDYlkzxByyJ1QeTI o|6|16-658-112-3221|4266.13|MACHINERY|ts. furiously ironic accounts cajole furiously slyly ironic dinos.|
+51|Customer#000000051|uR,wEaiTvo4|12|22-344-885-4251|855.87|FURNITURE|eposits. furiously regular requests integrate carefully packages. furious|
+52|Customer#000000052|7 QOqGqqSy9jfV51BC71jcHJSD0|11|21-186-284-5998|5630.28|HOUSEHOLD|ic platelets use evenly even accounts. stealthy theodolites cajole furiou|
+53|Customer#000000053|HnaxHzTfFTZs8MuCpJyTbZ47Cm4wFOOgib|15|25-168-852-5363|4113.64|HOUSEHOLD|ar accounts are. even foxes are blithely. fluffily pending deposits boost|
+54|Customer#000000054|,k4vf 5vECGWFy,hosTE,|4|14-776-370-4745|868.90|AUTOMOBILE|sual, silent accounts. furiously express accounts cajole special deposits. final, final accounts use furi|
+55|Customer#000000055|zIRBR4KNEl HzaiV3a i9n6elrxzDEh8r8pDom|10|20-180-440-8525|4572.11|MACHINERY|ully unusual packages wake bravely bold packages. unusual requests boost deposits! blithely ironic packages ab|
+56|Customer#000000056|BJYZYJQk4yD5B|10|20-895-685-6920|6530.86|FURNITURE|. notornis wake carefully. carefully fluffy requests are furiously even accounts. slyly expre|
+57|Customer#000000057|97XYbsuOPRXPWU|21|31-835-306-1650|4151.93|AUTOMOBILE|ove the carefully special packages. even, unusual deposits sleep slyly pend|
+58|Customer#000000058|g9ap7Dk1Sv9fcXEWjpMYpBZIRUohi T|13|23-244-493-2508|6478.46|HOUSEHOLD|ideas. ironic ideas affix furiously express, final instructions. regular excuses use quickly e|
+59|Customer#000000059|zLOCP0wh92OtBihgspOGl4|1|11-355-584-3112|3458.60|MACHINERY|ously final packages haggle blithely after the express deposits. furiou|
+60|Customer#000000060|FyodhjwMChsZmUz7Jz0H|12|22-480-575-5866|2741.87|MACHINERY|latelets. blithely unusual courts boost furiously about the packages. blithely final instruct|
+61|Customer#000000061|9kndve4EAJxhg3veF BfXr7AqOsT39o gtqjaYE|17|27-626-559-8599|1536.24|FURNITURE|egular packages shall have to impress along the |
+62|Customer#000000062|upJK2Dnw13,|7|17-361-978-7059|595.61|MACHINERY|kly special dolphins. pinto beans are slyly. quickly regular accounts are furiously a|
+63|Customer#000000063|IXRSpVWWZraKII|21|31-952-552-9584|9331.13|AUTOMOBILE|ithely even accounts detect slyly above the fluffily ir|
+64|Customer#000000064|MbCeGY20kaKK3oalJD,OT|3|13-558-731-7204|-646.64|BUILDING|structions after the quietly ironic theodolites cajole be|
+65|Customer#000000065|RGT yzQ0y4l0H90P783LG4U95bXQFDRXbWa1sl,X|23|33-733-623-5267|8795.16|AUTOMOBILE|y final foxes serve carefully. theodolites are carefully. pending i|
+66|Customer#000000066|XbsEqXH1ETbJYYtA1A|22|32-213-373-5094|242.77|HOUSEHOLD|le slyly accounts. carefully silent packages benea|
+67|Customer#000000067|rfG0cOgtr5W8 xILkwp9fpCS8|9|19-403-114-4356|8166.59|MACHINERY|indle furiously final, even theodo|
+68|Customer#000000068|o8AibcCRkXvQFh8hF,7o|12|22-918-832-2411|6853.37|HOUSEHOLD| pending pinto beans impress realms. final dependencies |
+69|Customer#000000069|Ltx17nO9Wwhtdbe9QZVxNgP98V7xW97uvSH1prEw|9|19-225-978-5670|1709.28|HOUSEHOLD|thely final ideas around the quickly final dependencies affix carefully quickly final theodolites. final accounts c|
+70|Customer#000000070|mFowIuhnHjp2GjCiYYavkW kUwOjIaTCQ|22|32-828-107-2832|4867.52|FURNITURE|fter the special asymptotes. ideas after the unusual frets cajole quickly regular pinto be|
+71|Customer#000000071|TlGalgdXWBmMV,6agLyWYDyIz9MKzcY8gl,w6t1B|7|17-710-812-5403|-611.19|HOUSEHOLD|g courts across the regular, final pinto beans are blithely pending ac|
+72|Customer#000000072|putjlmskxE,zs,HqeIA9Wqu7dhgH5BVCwDwHHcf|2|12-759-144-9689|-362.86|FURNITURE|ithely final foxes sleep always quickly bold accounts. final wat|
+73|Customer#000000073|8IhIxreu4Ug6tt5mog4|0|10-473-439-3214|4288.50|BUILDING|usual, unusual packages sleep busily along the furiou|
+74|Customer#000000074|IkJHCA3ZThF7qL7VKcrU nRLl,kylf |4|14-199-862-7209|2764.43|MACHINERY|onic accounts. blithely slow packages would haggle carefully. qui|
+75|Customer#000000075|Dh 6jZ,cwxWLKQfRKkiGrzv6pm|18|28-247-803-9025|6684.10|AUTOMOBILE| instructions cajole even, even deposits. finally bold deposits use above the even pains. slyl|
+76|Customer#000000076|m3sbCvjMOHyaOofH,e UkGPtqc4|0|10-349-718-3044|5745.33|FURNITURE|pecial deposits. ironic ideas boost blithely according to the closely ironic theodolites! furiously final deposits n|
+77|Customer#000000077|4tAE5KdMFGD4byHtXF92vx|17|27-269-357-4674|1738.87|BUILDING|uffily silent requests. carefully ironic asymptotes among the ironic hockey players are carefully bli|
+78|Customer#000000078|HBOta,ZNqpg3U2cSL0kbrftkPwzX|9|19-960-700-9191|7136.97|FURNITURE|ests. blithely bold pinto beans h|
+79|Customer#000000079|n5hH2ftkVRwW8idtD,BmM2|15|25-147-850-4166|5121.28|MACHINERY|es. packages haggle furiously. regular, special requests poach after the quickly express ideas. blithely pending re|
+80|Customer#000000080|K,vtXp8qYB |0|10-267-172-7101|7383.53|FURNITURE|tect among the dependencies. bold accounts engage closely even pinto beans. ca|
+81|Customer#000000081|SH6lPA7JiiNC6dNTrR|20|30-165-277-3269|2023.71|BUILDING|r packages. fluffily ironic requests cajole fluffily. ironically regular theodolit|
+82|Customer#000000082|zhG3EZbap4c992Gj3bK,3Ne,Xn|18|28-159-442-5305|9468.34|AUTOMOBILE|s wake. bravely regular accounts are furiously. regula|
+83|Customer#000000083|HnhTNB5xpnSF20JBH4Ycs6psVnkC3RDf|22|32-817-154-4122|6463.51|BUILDING|ccording to the quickly bold warhorses. final, regular foxes integrate carefully. bold packages nag blithely ev|
+84|Customer#000000084|lpXz6Fwr9945rnbtMc8PlueilS1WmASr CB|11|21-546-818-3802|5174.71|FURNITURE|ly blithe foxes. special asymptotes haggle blithely against the furiously regular depo|
+85|Customer#000000085|siRerlDwiolhYR 8FgksoezycLj|5|15-745-585-8219|3386.64|FURNITURE|ronic ideas use above the slowly pendin|
+86|Customer#000000086|US6EGGHXbTTXPL9SBsxQJsuvy|0|10-677-951-2353|3306.32|HOUSEHOLD|quests. pending dugouts are carefully aroun|
+87|Customer#000000087|hgGhHVSWQl 6jZ6Ev|23|33-869-884-7053|6327.54|FURNITURE|hely ironic requests integrate according to the ironic accounts. slyly regular pla|
+88|Customer#000000088|wtkjBN9eyrFuENSMmMFlJ3e7jE5KXcg|16|26-516-273-2566|8031.44|AUTOMOBILE|s are quickly above the quickly ironic instructions; even requests about the carefully final deposi|
+89|Customer#000000089|dtR, y9JQWUO6FoJExyp8whOU|14|24-394-451-5404|1530.76|FURNITURE|counts are slyly beyond the slyly final accounts. quickly final ideas wake. r|
+90|Customer#000000090|QxCzH7VxxYUWwfL7|16|26-603-491-1238|7354.23|BUILDING|sly across the furiously even |
+91|Customer#000000091|S8OMYFrpHwoNHaGBeuS6E 6zhHGZiprw1b7 q|8|18-239-400-3677|4643.14|AUTOMOBILE|onic accounts. fluffily silent pinto beans boost blithely according to the fluffily exp|
+92|Customer#000000092|obP PULk2LH LqNF,K9hcbNqnLAkJVsl5xqSrY,|2|12-446-416-8471|1182.91|MACHINERY|. pinto beans hang slyly final deposits. ac|
+93|Customer#000000093|EHXBr2QGdh|7|17-359-388-5266|2182.52|MACHINERY|press deposits. carefully regular platelets r|
+94|Customer#000000094|IfVNIN9KtkScJ9dUjK3Pg5gY1aFeaXewwf|9|19-953-499-8833|5500.11|HOUSEHOLD|latelets across the bold, final requests sleep according to the fluffily bold accounts. unusual deposits amon|
+95|Customer#000000095|EU0xvmWvOmUUn5J,2z85DQyG7QCJ9Xq7|15|25-923-255-2929|5327.38|MACHINERY|ithely. ruthlessly final requests wake slyly alongside of the furiously silent pinto beans. even the|
+96|Customer#000000096|vWLOrmXhRR|8|18-422-845-1202|6323.92|AUTOMOBILE|press requests believe furiously. carefully final instructions snooze carefully. |
+97|Customer#000000097|OApyejbhJG,0Iw3j rd1M|17|27-588-919-5638|2164.48|AUTOMOBILE|haggle slyly. bold, special ideas are blithely above the thinly bold theo|
+98|Customer#000000098|7yiheXNSpuEAwbswDW|12|22-885-845-6889|-551.37|BUILDING|ages. furiously pending accounts are quickly carefully final foxes: busily pe|
+99|Customer#000000099|szsrOiPtCHVS97Lt|15|25-515-237-9232|4088.65|HOUSEHOLD|cajole slyly about the regular theodolites! furiously bold requests nag along the pending, regular packages. somas|
+100|Customer#000000100|fptUABXcmkC5Wx|20|30-749-445-4907|9889.89|FURNITURE|was furiously fluffily quiet deposits. silent, pending requests boost against |
+101|Customer#000000101|sMmL2rNeHDltovSm Y|2|12-514-298-3699|7470.96|MACHINERY| sleep. pending packages detect slyly ironic pack|
+102|Customer#000000102|UAtflJ06 fn9zBfKjInkQZlWtqaA|19|29-324-978-8538|8462.17|BUILDING|ously regular dependencies nag among the furiously express dinos. blithely final|
+103|Customer#000000103|8KIsQX4LJ7QMsj6DrtFtXu0nUEdV,8a|9|19-216-107-2107|2757.45|BUILDING|furiously pending notornis boost slyly around the blithely ironic ideas? final, even instructions cajole fl|
+104|Customer#000000104|9mcCK L7rt0SwiYtrbO88DiZS7U d7M|10|20-966-284-8065|-588.38|FURNITURE|rate carefully slyly special pla|
+105|Customer#000000105|4iSJe4L SPjg7kJj98Yz3z0B|10|20-793-553-6417|9091.82|MACHINERY|l pains cajole even accounts. quietly final instructi|
+106|Customer#000000106|xGCOEAUjUNG|1|11-751-989-4627|3288.42|MACHINERY|lose slyly. ironic accounts along the evenly regular theodolites wake about the special, final gifts. |
+107|Customer#000000107|Zwg64UZ,q7GRqo3zm7P1tZIRshBDz|15|25-336-529-9919|2514.15|AUTOMOBILE|counts cajole slyly. regular requests wake. furiously regular deposits about the blithely final fo|
+108|Customer#000000108|GPoeEvpKo1|5|15-908-619-7526|2259.38|BUILDING|refully ironic deposits sleep. regular, unusual requests wake slyly|
+109|Customer#000000109|OOOkYBgCMzgMQXUmkocoLb56rfrdWp2NE2c|16|26-992-422-8153|-716.10|BUILDING|es. fluffily final dependencies sleep along the blithely even pinto beans. final deposits haggle furiously furiou|
+110|Customer#000000110|mymPfgphaYXNYtk|10|20-893-536-2069|7462.99|AUTOMOBILE|nto beans cajole around the even, final deposits. quickly bold packages according to the furiously regular dept|
+111|Customer#000000111|CBSbPyOWRorloj2TBvrK9qp9tHBs|22|32-582-283-7528|6505.26|MACHINERY|ly unusual instructions detect fluffily special deposits-- theodolites nag carefully during the ironic dependencies|
+112|Customer#000000112|RcfgG3bO7QeCnfjqJT1|19|29-233-262-8382|2953.35|FURNITURE|rmanently unusual multipliers. blithely ruthless deposits are furiously along the|
+113|Customer#000000113|eaOl5UBXIvdY57rglaIzqvfPD,MYfK|12|22-302-930-4756|2912.00|BUILDING|usly regular theodolites boost furiously doggedly pending instructio|
+114|Customer#000000114|xAt 5f5AlFIU|14|24-805-212-7646|1027.46|FURNITURE|der the carefully express theodolites are after the packages. packages are. bli|
+115|Customer#000000115|0WFt1IXENmUT2BgbsB0ShVKJZt0HCBCbFl0aHc|8|18-971-699-1843|7508.92|HOUSEHOLD|sits haggle above the carefully ironic theodolite|
+116|Customer#000000116|yCuVxIgsZ3,qyK2rloThy3u|16|26-632-309-5792|8403.99|BUILDING|as. quickly final sauternes haggle slyly carefully even packages. brave, ironic pinto beans are above the furious|
+117|Customer#000000117|uNhM,PzsRA3S,5Y Ge5Npuhi|24|34-403-631-3505|3950.83|FURNITURE|affix. instructions are furiously sl|
+118|Customer#000000118|OVnFuHygK9wx3xpg8|18|28-639-943-7051|3582.37|AUTOMOBILE|uick packages alongside of the furiously final deposits haggle above the fluffily even foxes. blithely dogged dep|
+119|Customer#000000119|M1ETOIecuvH8DtM0Y0nryXfW|7|17-697-919-8406|3930.35|FURNITURE|express ideas. blithely ironic foxes thrash. special acco|
+120|Customer#000000120|zBNna00AEInqyO1|12|22-291-534-1571|363.75|MACHINERY| quickly. slyly ironic requests cajole blithely furiously final dependen|
+121|Customer#000000121|tv nCR2YKupGN73mQudO|17|27-411-990-2959|6428.32|BUILDING|uriously stealthy ideas. carefully final courts use carefully|
+122|Customer#000000122|yp5slqoNd26lAENZW3a67wSfXA6hTF|3|13-702-694-4520|7865.46|HOUSEHOLD| the special packages hinder blithely around the permanent requests. bold depos|
+123|Customer#000000123|YsOnaaER8MkvK5cpf4VSlq|5|15-817-151-1168|5897.83|BUILDING|ependencies. regular, ironic requests are fluffily regu|
+124|Customer#000000124|aTbyVAW5tCd,v09O|18|28-183-750-7809|1842.49|AUTOMOBILE|le fluffily even dependencies. quietly s|
+125|Customer#000000125|,wSZXdVR xxIIfm9s8ITyLl3kgjT6UC07GY0Y|19|29-261-996-3120|-234.12|FURNITURE|x-ray finally after the packages? regular requests c|
+126|Customer#000000126|ha4EHmbx3kg DYCsP6DFeUOmavtQlHhcfaqr|22|32-755-914-7592|1001.39|HOUSEHOLD|s about the even instructions boost carefully furiously ironic pearls. ruthless, |
+127|Customer#000000127|Xyge4DX2rXKxXyye1Z47LeLVEYMLf4Bfcj|21|31-101-672-2951|9280.71|MACHINERY|ic, unusual theodolites nod silently after the final, ironic instructions: pending r|
+128|Customer#000000128|AmKUMlJf2NRHcKGmKjLS|4|14-280-874-8044|-986.96|HOUSEHOLD|ing packages integrate across the slyly unusual dugouts. blithely silent ideas sublate carefully. blithely expr|
+129|Customer#000000129|q7m7rbMM0BpaCdmxloCgBDRCleXsXkdD8kf|7|17-415-148-7416|9127.27|HOUSEHOLD| unusual deposits boost carefully furiously silent ideas. pending accounts cajole slyly across|
+130|Customer#000000130|RKPx2OfZy0Vn 8wGWZ7F2EAvmMORl1k8iH|9|19-190-993-9281|5073.58|HOUSEHOLD|ix slowly. express packages along the furiously ironic requests integrate daringly deposits. fur|
+131|Customer#000000131|jyN6lAjb1FtH10rMC,XzlWyCBrg75|11|21-840-210-3572|8595.53|HOUSEHOLD|jole special packages. furiously final dependencies about the furiously speci|
+132|Customer#000000132|QM5YabAsTLp9|4|14-692-150-9717|162.57|HOUSEHOLD|uickly carefully special theodolites. carefully regular requests against the blithely unusual instructions |
+133|Customer#000000133|IMCuXdpIvdkYO92kgDGuyHgojcUs88p|17|27-408-997-8430|2314.67|AUTOMOBILE|t packages. express pinto beans are blithely along the unusual, even theodolites. silent packages use fu|
+134|Customer#000000134|sUiZ78QCkTQPICKpA9OBzkUp2FM|11|21-200-159-5932|4608.90|BUILDING|yly fluffy foxes boost final ideas. b|
+135|Customer#000000135|oZK,oC0 fdEpqUML|19|29-399-293-6241|8732.91|FURNITURE| the slyly final accounts. deposits cajole carefully. carefully sly packag|
+136|Customer#000000136|QoLsJ0v5C1IQbh,DS1|7|17-501-210-4726|-842.39|FURNITURE|ackages sleep ironic, final courts. even requests above the blithely bold requests g|
+137|Customer#000000137|cdW91p92rlAEHgJafqYyxf1Q|16|26-777-409-5654|7838.30|HOUSEHOLD|carefully regular theodolites use. silent dolphins cajo|
+138|Customer#000000138|5uyLAeY7HIGZqtu66Yn08f|5|15-394-860-4589|430.59|MACHINERY|ts doze on the busy ideas. regular|
+139|Customer#000000139|3ElvBwudHKL02732YexGVFVt |9|19-140-352-1403|7897.78|MACHINERY|nstructions. quickly ironic ideas are carefully. bold, |
+140|Customer#000000140|XRqEPiKgcETII,iOLDZp5jA|4|14-273-885-6505|9963.15|MACHINERY|ies detect slyly ironic accounts. slyly ironic theodolites hag|
+141|Customer#000000141|5IW,WROVnikc3l7DwiUDGQNGsLBGOL6Dc0|1|11-936-295-6204|6706.14|FURNITURE|packages nag furiously. carefully unusual accounts snooze according to the fluffily regular pinto beans. slyly spec|
+142|Customer#000000142|AnJ5lxtLjioClr2khl9pb8NLxG2,|9|19-407-425-2584|2209.81|AUTOMOBILE|. even, express theodolites upo|
+143|Customer#000000143|681r22uL452zqk 8By7I9o9enQfx0|16|26-314-406-7725|2186.50|MACHINERY|across the blithely unusual requests haggle theodo|
+144|Customer#000000144|VxYZ3ebhgbltnetaGjNC8qCccjYU05 fePLOno8y|1|11-717-379-4478|6417.31|MACHINERY|ges. slyly regular accounts are slyly. bold, idle reque|
+145|Customer#000000145|kQjHmt2kcec cy3hfMh969u|13|23-562-444-8454|9748.93|HOUSEHOLD|ests? express, express instructions use. blithely fina|
+146|Customer#000000146|GdxkdXG9u7iyI1,,y5tq4ZyrcEy|3|13-835-723-3223|3328.68|FURNITURE|ffily regular dinos are slyly unusual requests. slyly specia|
+147|Customer#000000147|6VvIwbVdmcsMzuu,C84GtBWPaipGfi7DV|18|28-803-187-4335|8071.40|AUTOMOBILE|ress packages above the blithely regular packages sleep fluffily blithely ironic accounts. |
+148|Customer#000000148|BhSPlEWGvIJyT9swk vCWE|11|21-562-498-6636|2135.60|HOUSEHOLD|ing to the carefully ironic requests. carefully regular dependencies about the theodolites wake furious|
+149|Customer#000000149|3byTHCp2mNLPigUrrq|19|29-797-439-6760|8959.65|AUTOMOBILE|al instructions haggle against the slyly bold w|
+150|Customer#000000150|zeoGShTjCwGPplOWFkLURrh41O0AZ8dwNEEN4 |18|28-328-564-7630|3849.48|MACHINERY|ole blithely among the furiously pending packages. furiously bold ideas wake fluffily ironic idea|
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/expected/part-0 b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/expected/part-0
new file mode 100755
index 0000000..ce3b00c
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/expected/part-0
@@ -0,0 +1,150 @@
+100|Customer#000000100|fptUABXcmkC5Wx|20|30-749-445-4907|9889.89|FURNITURE|was furiously fluffily quiet deposits. silent, pending requests boost against |
+101|Customer#000000101|sMmL2rNeHDltovSm Y|2|12-514-298-3699|7470.96|MACHINERY| sleep. pending packages detect slyly ironic pack|
+102|Customer#000000102|UAtflJ06 fn9zBfKjInkQZlWtqaA|19|29-324-978-8538|8462.17|BUILDING|ously regular dependencies nag among the furiously express dinos. blithely final|
+103|Customer#000000103|8KIsQX4LJ7QMsj6DrtFtXu0nUEdV,8a|9|19-216-107-2107|2757.45|BUILDING|furiously pending notornis boost slyly around the blithely ironic ideas? final, even instructions cajole fl|
+104|Customer#000000104|9mcCK L7rt0SwiYtrbO88DiZS7U d7M|10|20-966-284-8065|-588.38|FURNITURE|rate carefully slyly special pla|
+105|Customer#000000105|4iSJe4L SPjg7kJj98Yz3z0B|10|20-793-553-6417|9091.82|MACHINERY|l pains cajole even accounts. quietly final instructi|
+106|Customer#000000106|xGCOEAUjUNG|1|11-751-989-4627|3288.42|MACHINERY|lose slyly. ironic accounts along the evenly regular theodolites wake about the special, final gifts. |
+107|Customer#000000107|Zwg64UZ,q7GRqo3zm7P1tZIRshBDz|15|25-336-529-9919|2514.15|AUTOMOBILE|counts cajole slyly. regular requests wake. furiously regular deposits about the blithely final fo|
+108|Customer#000000108|GPoeEvpKo1|5|15-908-619-7526|2259.38|BUILDING|refully ironic deposits sleep. regular, unusual requests wake slyly|
+109|Customer#000000109|OOOkYBgCMzgMQXUmkocoLb56rfrdWp2NE2c|16|26-992-422-8153|-716.10|BUILDING|es. fluffily final dependencies sleep along the blithely even pinto beans. final deposits haggle furiously furiou|
+10|Customer#000000010|6LrEaV6KR6PLVcgl2ArL Q3rqzLzcT1 v2|5|15-741-346-9870|2753.54|HOUSEHOLD|es regular deposits haggle. fur|
+110|Customer#000000110|mymPfgphaYXNYtk|10|20-893-536-2069|7462.99|AUTOMOBILE|nto beans cajole around the even, final deposits. quickly bold packages according to the furiously regular dept|
+111|Customer#000000111|CBSbPyOWRorloj2TBvrK9qp9tHBs|22|32-582-283-7528|6505.26|MACHINERY|ly unusual instructions detect fluffily special deposits-- theodolites nag carefully during the ironic dependencies|
+112|Customer#000000112|RcfgG3bO7QeCnfjqJT1|19|29-233-262-8382|2953.35|FURNITURE|rmanently unusual multipliers. blithely ruthless deposits are furiously along the|
+113|Customer#000000113|eaOl5UBXIvdY57rglaIzqvfPD,MYfK|12|22-302-930-4756|2912.00|BUILDING|usly regular theodolites boost furiously doggedly pending instructio|
+114|Customer#000000114|xAt 5f5AlFIU|14|24-805-212-7646|1027.46|FURNITURE|der the carefully express theodolites are after the packages. packages are. bli|
+115|Customer#000000115|0WFt1IXENmUT2BgbsB0ShVKJZt0HCBCbFl0aHc|8|18-971-699-1843|7508.92|HOUSEHOLD|sits haggle above the carefully ironic theodolite|
+116|Customer#000000116|yCuVxIgsZ3,qyK2rloThy3u|16|26-632-309-5792|8403.99|BUILDING|as. quickly final sauternes haggle slyly carefully even packages. brave, ironic pinto beans are above the furious|
+117|Customer#000000117|uNhM,PzsRA3S,5Y Ge5Npuhi|24|34-403-631-3505|3950.83|FURNITURE|affix. instructions are furiously sl|
+118|Customer#000000118|OVnFuHygK9wx3xpg8|18|28-639-943-7051|3582.37|AUTOMOBILE|uick packages alongside of the furiously final deposits haggle above the fluffily even foxes. blithely dogged dep|
+119|Customer#000000119|M1ETOIecuvH8DtM0Y0nryXfW|7|17-697-919-8406|3930.35|FURNITURE|express ideas. blithely ironic foxes thrash. special acco|
+11|Customer#000000011|PkWS 3HlXqwTuzrKg633BEi|23|33-464-151-3439|-272.60|BUILDING|ckages. requests sleep slyly. quickly even pinto beans promise above the slyly regular pinto beans. |
+120|Customer#000000120|zBNna00AEInqyO1|12|22-291-534-1571|363.75|MACHINERY| quickly. slyly ironic requests cajole blithely furiously final dependen|
+121|Customer#000000121|tv nCR2YKupGN73mQudO|17|27-411-990-2959|6428.32|BUILDING|uriously stealthy ideas. carefully final courts use carefully|
+122|Customer#000000122|yp5slqoNd26lAENZW3a67wSfXA6hTF|3|13-702-694-4520|7865.46|HOUSEHOLD| the special packages hinder blithely around the permanent requests. bold depos|
+123|Customer#000000123|YsOnaaER8MkvK5cpf4VSlq|5|15-817-151-1168|5897.83|BUILDING|ependencies. regular, ironic requests are fluffily regu|
+124|Customer#000000124|aTbyVAW5tCd,v09O|18|28-183-750-7809|1842.49|AUTOMOBILE|le fluffily even dependencies. quietly s|
+125|Customer#000000125|,wSZXdVR xxIIfm9s8ITyLl3kgjT6UC07GY0Y|19|29-261-996-3120|-234.12|FURNITURE|x-ray finally after the packages? regular requests c|
+126|Customer#000000126|ha4EHmbx3kg DYCsP6DFeUOmavtQlHhcfaqr|22|32-755-914-7592|1001.39|HOUSEHOLD|s about the even instructions boost carefully furiously ironic pearls. ruthless, |
+127|Customer#000000127|Xyge4DX2rXKxXyye1Z47LeLVEYMLf4Bfcj|21|31-101-672-2951|9280.71|MACHINERY|ic, unusual theodolites nod silently after the final, ironic instructions: pending r|
+128|Customer#000000128|AmKUMlJf2NRHcKGmKjLS|4|14-280-874-8044|-986.96|HOUSEHOLD|ing packages integrate across the slyly unusual dugouts. blithely silent ideas sublate carefully. blithely expr|
+129|Customer#000000129|q7m7rbMM0BpaCdmxloCgBDRCleXsXkdD8kf|7|17-415-148-7416|9127.27|HOUSEHOLD| unusual deposits boost carefully furiously silent ideas. pending accounts cajole slyly across|
+12|Customer#000000012|9PWKuhzT4Zr1Q|13|23-791-276-1263|3396.49|HOUSEHOLD| to the carefully final braids. blithely regular requests nag. ironic theodolites boost quickly along|
+130|Customer#000000130|RKPx2OfZy0Vn 8wGWZ7F2EAvmMORl1k8iH|9|19-190-993-9281|5073.58|HOUSEHOLD|ix slowly. express packages along the furiously ironic requests integrate daringly deposits. fur|
+131|Customer#000000131|jyN6lAjb1FtH10rMC,XzlWyCBrg75|11|21-840-210-3572|8595.53|HOUSEHOLD|jole special packages. furiously final dependencies about the furiously speci|
+132|Customer#000000132|QM5YabAsTLp9|4|14-692-150-9717|162.57|HOUSEHOLD|uickly carefully special theodolites. carefully regular requests against the blithely unusual instructions |
+133|Customer#000000133|IMCuXdpIvdkYO92kgDGuyHgojcUs88p|17|27-408-997-8430|2314.67|AUTOMOBILE|t packages. express pinto beans are blithely along the unusual, even theodolites. silent packages use fu|
+134|Customer#000000134|sUiZ78QCkTQPICKpA9OBzkUp2FM|11|21-200-159-5932|4608.90|BUILDING|yly fluffy foxes boost final ideas. b|
+135|Customer#000000135|oZK,oC0 fdEpqUML|19|29-399-293-6241|8732.91|FURNITURE| the slyly final accounts. deposits cajole carefully. carefully sly packag|
+136|Customer#000000136|QoLsJ0v5C1IQbh,DS1|7|17-501-210-4726|-842.39|FURNITURE|ackages sleep ironic, final courts. even requests above the blithely bold requests g|
+137|Customer#000000137|cdW91p92rlAEHgJafqYyxf1Q|16|26-777-409-5654|7838.30|HOUSEHOLD|carefully regular theodolites use. silent dolphins cajo|
+138|Customer#000000138|5uyLAeY7HIGZqtu66Yn08f|5|15-394-860-4589|430.59|MACHINERY|ts doze on the busy ideas. regular|
+139|Customer#000000139|3ElvBwudHKL02732YexGVFVt |9|19-140-352-1403|7897.78|MACHINERY|nstructions. quickly ironic ideas are carefully. bold, |
+13|Customer#000000013|nsXQu0oVjD7PM659uC3SRSp|3|13-761-547-5974|3857.34|BUILDING|ounts sleep carefully after the close frays. carefully bold notornis use ironic requests. blithely|
+140|Customer#000000140|XRqEPiKgcETII,iOLDZp5jA|4|14-273-885-6505|9963.15|MACHINERY|ies detect slyly ironic accounts. slyly ironic theodolites hag|
+141|Customer#000000141|5IW,WROVnikc3l7DwiUDGQNGsLBGOL6Dc0|1|11-936-295-6204|6706.14|FURNITURE|packages nag furiously. carefully unusual accounts snooze according to the fluffily regular pinto beans. slyly spec|
+142|Customer#000000142|AnJ5lxtLjioClr2khl9pb8NLxG2,|9|19-407-425-2584|2209.81|AUTOMOBILE|. even, express theodolites upo|
+143|Customer#000000143|681r22uL452zqk 8By7I9o9enQfx0|16|26-314-406-7725|2186.50|MACHINERY|across the blithely unusual requests haggle theodo|
+144|Customer#000000144|VxYZ3ebhgbltnetaGjNC8qCccjYU05 fePLOno8y|1|11-717-379-4478|6417.31|MACHINERY|ges. slyly regular accounts are slyly. bold, idle reque|
+145|Customer#000000145|kQjHmt2kcec cy3hfMh969u|13|23-562-444-8454|9748.93|HOUSEHOLD|ests? express, express instructions use. blithely fina|
+146|Customer#000000146|GdxkdXG9u7iyI1,,y5tq4ZyrcEy|3|13-835-723-3223|3328.68|FURNITURE|ffily regular dinos are slyly unusual requests. slyly specia|
+147|Customer#000000147|6VvIwbVdmcsMzuu,C84GtBWPaipGfi7DV|18|28-803-187-4335|8071.40|AUTOMOBILE|ress packages above the blithely regular packages sleep fluffily blithely ironic accounts. |
+148|Customer#000000148|BhSPlEWGvIJyT9swk vCWE|11|21-562-498-6636|2135.60|HOUSEHOLD|ing to the carefully ironic requests. carefully regular dependencies about the theodolites wake furious|
+149|Customer#000000149|3byTHCp2mNLPigUrrq|19|29-797-439-6760|8959.65|AUTOMOBILE|al instructions haggle against the slyly bold w|
+14|Customer#000000014|KXkletMlL2JQEA |1|11-845-129-3851|5266.30|FURNITURE|, ironic packages across the unus|
+150|Customer#000000150|zeoGShTjCwGPplOWFkLURrh41O0AZ8dwNEEN4 |18|28-328-564-7630|3849.48|MACHINERY|ole blithely among the furiously pending packages. furiously bold ideas wake fluffily ironic idea|
+15|Customer#000000015|YtWggXoOLdwdo7b0y,BZaGUQMLJMX1Y,EC,6Dn|23|33-687-542-7601|2788.52|HOUSEHOLD| platelets. regular deposits detect asymptotes. blithely unusual packages nag slyly at the fluf|
+16|Customer#000000016|cYiaeMLZSMAOQ2 d0W,|10|20-781-609-3107|4681.03|FURNITURE|kly silent courts. thinly regular theodolites sleep fluffily after |
+17|Customer#000000017|izrh 6jdqtp2eqdtbkswDD8SG4SzXruMfIXyR7|2|12-970-682-3487|6.34|AUTOMOBILE|packages wake! blithely even pint|
+18|Customer#000000018|3txGO AiuFux3zT0Z9NYaFRnZt|6|16-155-215-1315|5494.43|BUILDING|s sleep. carefully even instructions nag furiously alongside of t|
+19|Customer#000000019|uc,3bHIx84H,wdrmLOjVsiqXCq2tr|18|28-396-526-5053|8914.71|HOUSEHOLD| nag. furiously careful packages are slyly at the accounts. furiously regular in|
+1|Customer#000000001|IVhzIApeRb ot,c,E|15|25-989-741-2988|711.56|BUILDING|to the even, regular platelets. regular, ironic epitaphs nag e|
+20|Customer#000000020|JrPk8Pqplj4Ne|22|32-957-234-8742|7603.40|FURNITURE|g alongside of the special excuses-- fluffily enticing packages wake |
+21|Customer#000000021|XYmVpr9yAHDEn|8|18-902-614-8344|1428.25|MACHINERY| quickly final accounts integrate blithely furiously u|
+22|Customer#000000022|QI6p41,FNs5k7RZoCCVPUTkUdYpB|3|13-806-545-9701|591.98|MACHINERY|s nod furiously above the furiously ironic ideas. |
+23|Customer#000000023|OdY W13N7Be3OC5MpgfmcYss0Wn6TKT|3|13-312-472-8245|3332.02|HOUSEHOLD|deposits. special deposits cajole slyly. fluffily special deposits about the furiously |
+24|Customer#000000024|HXAFgIAyjxtdqwimt13Y3OZO 4xeLe7U8PqG|13|23-127-851-8031|9255.67|MACHINERY|into beans. fluffily final ideas haggle fluffily|
+25|Customer#000000025|Hp8GyFQgGHFYSilH5tBfe|12|22-603-468-3533|7133.70|FURNITURE|y. accounts sleep ruthlessly according to the regular theodolites. unusual instructions sleep. ironic, final|
+26|Customer#000000026|8ljrc5ZeMl7UciP|22|32-363-455-4837|5182.05|AUTOMOBILE|c requests use furiously ironic requests. slyly ironic dependencies us|
+27|Customer#000000027|IS8GIyxpBrLpMT0u7|3|13-137-193-2709|5679.84|BUILDING| about the carefully ironic pinto beans. accoun|
+28|Customer#000000028|iVyg0daQ,Tha8x2WPWA9m2529m|8|18-774-241-1462|1007.18|FURNITURE| along the regular deposits. furiously final pac|
+29|Customer#000000029|sJ5adtfyAkCK63df2,vF25zyQMVYE34uh|0|10-773-203-7342|7618.27|FURNITURE|its after the carefully final platelets x-ray against |
+2|Customer#000000002|XSTf4,NCwDVaWNe6tEgvwfmRchLXak|13|23-768-687-3665|121.65|AUTOMOBILE|l accounts. blithely ironic theodolites integrate boldly: caref|
+30|Customer#000000030|nJDsELGAavU63Jl0c5NKsKfL8rIJQQkQnYL2QJY|1|11-764-165-5076|9321.01|BUILDING|lithely final requests. furiously unusual account|
+31|Customer#000000031|LUACbO0viaAv6eXOAebryDB xjVst|23|33-197-837-7094|5236.89|HOUSEHOLD|s use among the blithely pending depo|
+32|Customer#000000032|jD2xZzi UmId,DCtNBLXKj9q0Tlp2iQ6ZcO3J|15|25-430-914-2194|3471.53|BUILDING|cial ideas. final, furious requests across the e|
+33|Customer#000000033|qFSlMuLucBmx9xnn5ib2csWUweg D|17|27-375-391-1280|-78.56|AUTOMOBILE|s. slyly regular accounts are furiously. carefully pending requests|
+34|Customer#000000034|Q6G9wZ6dnczmtOx509xgE,M2KV|15|25-344-968-5422|8589.70|HOUSEHOLD|nder against the even, pending accounts. even|
+35|Customer#000000035|TEjWGE4nBzJL2|17|27-566-888-7431|1228.24|HOUSEHOLD|requests. special, express requests nag slyly furiousl|
+36|Customer#000000036|3TvCzjuPzpJ0,DdJ8kW5U|21|31-704-669-5769|4987.27|BUILDING|haggle. enticing, quiet platelets grow quickly bold sheaves. carefully regular acc|
+37|Customer#000000037|7EV4Pwh,3SboctTWt|8|18-385-235-7162|-917.75|FURNITURE|ilent packages are carefully among the deposits. furiousl|
+38|Customer#000000038|a5Ee5e9568R8RLP 2ap7|12|22-306-880-7212|6345.11|HOUSEHOLD|lar excuses. closely even asymptotes cajole blithely excuses. carefully silent pinto beans sleep carefully fin|
+39|Customer#000000039|nnbRg,Pvy33dfkorYE FdeZ60|2|12-387-467-6509|6264.31|AUTOMOBILE|tions. slyly silent excuses slee|
+3|Customer#000000003|MG9kdTD2WBHm|1|11-719-748-3364|7498.12|AUTOMOBILE| deposits eat slyly ironic, even instructions. express foxes detect slyly. blithely even accounts abov|
+40|Customer#000000040|gOnGWAyhSV1ofv|3|13-652-915-8939|1335.30|BUILDING|rges impress after the slyly ironic courts. foxes are. blithely |
+41|Customer#000000041|IM9mzmyoxeBmvNw8lA7G3Ydska2nkZF|10|20-917-711-4011|270.95|HOUSEHOLD|ly regular accounts hang bold, silent packages. unusual foxes haggle slyly above the special, final depo|
+42|Customer#000000042|ziSrvyyBke|5|15-416-330-4175|8727.01|BUILDING|ssly according to the pinto beans: carefully special requests across the even, pending accounts wake special|
+43|Customer#000000043|ouSbjHk8lh5fKX3zGso3ZSIj9Aa3PoaFd|19|29-316-665-2897|9904.28|MACHINERY|ial requests: carefully pending foxes detect quickly. carefully final courts cajole quickly. carefully|
+44|Customer#000000044|Oi,dOSPwDu4jo4x,,P85E0dmhZGvNtBwi|16|26-190-260-5375|7315.94|AUTOMOBILE|r requests around the unusual, bold a|
+45|Customer#000000045|4v3OcpFgoOmMG,CbnF,4mdC|9|19-715-298-9917|9983.38|AUTOMOBILE|nto beans haggle slyly alongside of t|
+46|Customer#000000046|eaTXWWm10L9|6|16-357-681-2007|5744.59|AUTOMOBILE|ctions. accounts sleep furiously even requests. regular, regular accounts cajole blithely around the final pa|
+47|Customer#000000047|b0UgocSqEW5 gdVbhNT|2|12-427-271-9466|274.58|BUILDING|ions. express, ironic instructions sleep furiously ironic ideas. furi|
+48|Customer#000000048|0UU iPhBupFvemNB|0|10-508-348-5882|3792.50|BUILDING|re fluffily pending foxes. pending, bold platelets sleep slyly. even platelets cajo|
+49|Customer#000000049|cNgAeX7Fqrdf7HQN9EwjUa4nxT,68L FKAxzl|10|20-908-631-4424|4573.94|FURNITURE|nusual foxes! fluffily pending packages maintain to the regular |
+4|Customer#000000004|XxVSJsLAGtn|4|14-128-190-5944|2866.83|MACHINERY| requests. final, regular ideas sleep final accou|
+50|Customer#000000050|9SzDYlkzxByyJ1QeTI o|6|16-658-112-3221|4266.13|MACHINERY|ts. furiously ironic accounts cajole furiously slyly ironic dinos.|
+51|Customer#000000051|uR,wEaiTvo4|12|22-344-885-4251|855.87|FURNITURE|eposits. furiously regular requests integrate carefully packages. furious|
+52|Customer#000000052|7 QOqGqqSy9jfV51BC71jcHJSD0|11|21-186-284-5998|5630.28|HOUSEHOLD|ic platelets use evenly even accounts. stealthy theodolites cajole furiou|
+53|Customer#000000053|HnaxHzTfFTZs8MuCpJyTbZ47Cm4wFOOgib|15|25-168-852-5363|4113.64|HOUSEHOLD|ar accounts are. even foxes are blithely. fluffily pending deposits boost|
+54|Customer#000000054|,k4vf 5vECGWFy,hosTE,|4|14-776-370-4745|868.90|AUTOMOBILE|sual, silent accounts. furiously express accounts cajole special deposits. final, final accounts use furi|
+55|Customer#000000055|zIRBR4KNEl HzaiV3a i9n6elrxzDEh8r8pDom|10|20-180-440-8525|4572.11|MACHINERY|ully unusual packages wake bravely bold packages. unusual requests boost deposits! blithely ironic packages ab|
+56|Customer#000000056|BJYZYJQk4yD5B|10|20-895-685-6920|6530.86|FURNITURE|. notornis wake carefully. carefully fluffy requests are furiously even accounts. slyly expre|
+57|Customer#000000057|97XYbsuOPRXPWU|21|31-835-306-1650|4151.93|AUTOMOBILE|ove the carefully special packages. even, unusual deposits sleep slyly pend|
+58|Customer#000000058|g9ap7Dk1Sv9fcXEWjpMYpBZIRUohi T|13|23-244-493-2508|6478.46|HOUSEHOLD|ideas. ironic ideas affix furiously express, final instructions. regular excuses use quickly e|
+59|Customer#000000059|zLOCP0wh92OtBihgspOGl4|1|11-355-584-3112|3458.60|MACHINERY|ously final packages haggle blithely after the express deposits. furiou|
+5|Customer#000000005|KvpyuHCplrB84WgAiGV6sYpZq7Tj|3|13-750-942-6364|794.47|HOUSEHOLD|n accounts will have to unwind. foxes cajole accor|
+60|Customer#000000060|FyodhjwMChsZmUz7Jz0H|12|22-480-575-5866|2741.87|MACHINERY|latelets. blithely unusual courts boost furiously about the packages. blithely final instruct|
+61|Customer#000000061|9kndve4EAJxhg3veF BfXr7AqOsT39o gtqjaYE|17|27-626-559-8599|1536.24|FURNITURE|egular packages shall have to impress along the |
+62|Customer#000000062|upJK2Dnw13,|7|17-361-978-7059|595.61|MACHINERY|kly special dolphins. pinto beans are slyly. quickly regular accounts are furiously a|
+63|Customer#000000063|IXRSpVWWZraKII|21|31-952-552-9584|9331.13|AUTOMOBILE|ithely even accounts detect slyly above the fluffily ir|
+64|Customer#000000064|MbCeGY20kaKK3oalJD,OT|3|13-558-731-7204|-646.64|BUILDING|structions after the quietly ironic theodolites cajole be|
+65|Customer#000000065|RGT yzQ0y4l0H90P783LG4U95bXQFDRXbWa1sl,X|23|33-733-623-5267|8795.16|AUTOMOBILE|y final foxes serve carefully. theodolites are carefully. pending i|
+66|Customer#000000066|XbsEqXH1ETbJYYtA1A|22|32-213-373-5094|242.77|HOUSEHOLD|le slyly accounts. carefully silent packages benea|
+67|Customer#000000067|rfG0cOgtr5W8 xILkwp9fpCS8|9|19-403-114-4356|8166.59|MACHINERY|indle furiously final, even theodo|
+68|Customer#000000068|o8AibcCRkXvQFh8hF,7o|12|22-918-832-2411|6853.37|HOUSEHOLD| pending pinto beans impress realms. final dependencies |
+69|Customer#000000069|Ltx17nO9Wwhtdbe9QZVxNgP98V7xW97uvSH1prEw|9|19-225-978-5670|1709.28|HOUSEHOLD|thely final ideas around the quickly final dependencies affix carefully quickly final theodolites. final accounts c|
+6|Customer#000000006|sKZz0CsnMD7mp4Xd0YrBvx,LREYKUWAh yVn|20|30-114-968-4951|7638.57|AUTOMOBILE|tions. even deposits boost according to the slyly bold packages. final accounts cajole requests. furious|
+70|Customer#000000070|mFowIuhnHjp2GjCiYYavkW kUwOjIaTCQ|22|32-828-107-2832|4867.52|FURNITURE|fter the special asymptotes. ideas after the unusual frets cajole quickly regular pinto be|
+71|Customer#000000071|TlGalgdXWBmMV,6agLyWYDyIz9MKzcY8gl,w6t1B|7|17-710-812-5403|-611.19|HOUSEHOLD|g courts across the regular, final pinto beans are blithely pending ac|
+72|Customer#000000072|putjlmskxE,zs,HqeIA9Wqu7dhgH5BVCwDwHHcf|2|12-759-144-9689|-362.86|FURNITURE|ithely final foxes sleep always quickly bold accounts. final wat|
+73|Customer#000000073|8IhIxreu4Ug6tt5mog4|0|10-473-439-3214|4288.50|BUILDING|usual, unusual packages sleep busily along the furiou|
+74|Customer#000000074|IkJHCA3ZThF7qL7VKcrU nRLl,kylf |4|14-199-862-7209|2764.43|MACHINERY|onic accounts. blithely slow packages would haggle carefully. qui|
+75|Customer#000000075|Dh 6jZ,cwxWLKQfRKkiGrzv6pm|18|28-247-803-9025|6684.10|AUTOMOBILE| instructions cajole even, even deposits. finally bold deposits use above the even pains. slyl|
+76|Customer#000000076|m3sbCvjMOHyaOofH,e UkGPtqc4|0|10-349-718-3044|5745.33|FURNITURE|pecial deposits. ironic ideas boost blithely according to the closely ironic theodolites! furiously final deposits n|
+77|Customer#000000077|4tAE5KdMFGD4byHtXF92vx|17|27-269-357-4674|1738.87|BUILDING|uffily silent requests. carefully ironic asymptotes among the ironic hockey players are carefully bli|
+78|Customer#000000078|HBOta,ZNqpg3U2cSL0kbrftkPwzX|9|19-960-700-9191|7136.97|FURNITURE|ests. blithely bold pinto beans h|
+79|Customer#000000079|n5hH2ftkVRwW8idtD,BmM2|15|25-147-850-4166|5121.28|MACHINERY|es. packages haggle furiously. regular, special requests poach after the quickly express ideas. blithely pending re|
+7|Customer#000000007|TcGe5gaZNgVePxU5kRrvXBfkasDTea|18|28-190-982-9759|9561.95|AUTOMOBILE|ainst the ironic, express theodolites. express, even pinto beans among the exp|
+80|Customer#000000080|K,vtXp8qYB |0|10-267-172-7101|7383.53|FURNITURE|tect among the dependencies. bold accounts engage closely even pinto beans. ca|
+81|Customer#000000081|SH6lPA7JiiNC6dNTrR|20|30-165-277-3269|2023.71|BUILDING|r packages. fluffily ironic requests cajole fluffily. ironically regular theodolit|
+82|Customer#000000082|zhG3EZbap4c992Gj3bK,3Ne,Xn|18|28-159-442-5305|9468.34|AUTOMOBILE|s wake. bravely regular accounts are furiously. regula|
+83|Customer#000000083|HnhTNB5xpnSF20JBH4Ycs6psVnkC3RDf|22|32-817-154-4122|6463.51|BUILDING|ccording to the quickly bold warhorses. final, regular foxes integrate carefully. bold packages nag blithely ev|
+84|Customer#000000084|lpXz6Fwr9945rnbtMc8PlueilS1WmASr CB|11|21-546-818-3802|5174.71|FURNITURE|ly blithe foxes. special asymptotes haggle blithely against the furiously regular depo|
+85|Customer#000000085|siRerlDwiolhYR 8FgksoezycLj|5|15-745-585-8219|3386.64|FURNITURE|ronic ideas use above the slowly pendin|
+86|Customer#000000086|US6EGGHXbTTXPL9SBsxQJsuvy|0|10-677-951-2353|3306.32|HOUSEHOLD|quests. pending dugouts are carefully aroun|
+87|Customer#000000087|hgGhHVSWQl 6jZ6Ev|23|33-869-884-7053|6327.54|FURNITURE|hely ironic requests integrate according to the ironic accounts. slyly regular pla|
+88|Customer#000000088|wtkjBN9eyrFuENSMmMFlJ3e7jE5KXcg|16|26-516-273-2566|8031.44|AUTOMOBILE|s are quickly above the quickly ironic instructions; even requests about the carefully final deposi|
+89|Customer#000000089|dtR, y9JQWUO6FoJExyp8whOU|14|24-394-451-5404|1530.76|FURNITURE|counts are slyly beyond the slyly final accounts. quickly final ideas wake. r|
+8|Customer#000000008|I0B10bB0AymmC, 0PrRYBCP1yGJ8xcBPmWhl5|17|27-147-574-9335|6819.74|BUILDING|among the slyly regular theodolites kindle blithely courts. carefully even theodolites haggle slyly along the ide|
+90|Customer#000000090|QxCzH7VxxYUWwfL7|16|26-603-491-1238|7354.23|BUILDING|sly across the furiously even |
+91|Customer#000000091|S8OMYFrpHwoNHaGBeuS6E 6zhHGZiprw1b7 q|8|18-239-400-3677|4643.14|AUTOMOBILE|onic accounts. fluffily silent pinto beans boost blithely according to the fluffily exp|
+92|Customer#000000092|obP PULk2LH LqNF,K9hcbNqnLAkJVsl5xqSrY,|2|12-446-416-8471|1182.91|MACHINERY|. pinto beans hang slyly final deposits. ac|
+93|Customer#000000093|EHXBr2QGdh|7|17-359-388-5266|2182.52|MACHINERY|press deposits. carefully regular platelets r|
+94|Customer#000000094|IfVNIN9KtkScJ9dUjK3Pg5gY1aFeaXewwf|9|19-953-499-8833|5500.11|HOUSEHOLD|latelets across the bold, final requests sleep according to the fluffily bold accounts. unusual deposits amon|
+95|Customer#000000095|EU0xvmWvOmUUn5J,2z85DQyG7QCJ9Xq7|15|25-923-255-2929|5327.38|MACHINERY|ithely. ruthlessly final requests wake slyly alongside of the furiously silent pinto beans. even the|
+96|Customer#000000096|vWLOrmXhRR|8|18-422-845-1202|6323.92|AUTOMOBILE|press requests believe furiously. carefully final instructions snooze carefully. |
+97|Customer#000000097|OApyejbhJG,0Iw3j rd1M|17|27-588-919-5638|2164.48|AUTOMOBILE|haggle slyly. bold, special ideas are blithely above the thinly bold theo|
+98|Customer#000000098|7yiheXNSpuEAwbswDW|12|22-885-845-6889|-551.37|BUILDING|ages. furiously pending accounts are quickly carefully final foxes: busily pe|
+99|Customer#000000099|szsrOiPtCHVS97Lt|15|25-515-237-9232|4088.65|HOUSEHOLD|cajole slyly about the regular theodolites! furiously bold requests nag along the pending, regular packages. somas|
+9|Customer#000000009|xKiAFTjUsCuxfeleNqefumTrjS|8|18-338-906-3675|8324.07|FURNITURE|r theodolites according to the requests wake thinly excuses: pending requests haggle furiousl|
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/core-site.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/core-site.xml
new file mode 100644
index 0000000..47dfac5
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/core-site.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+    <name>fs.default.name</name>
+    <value>hdfs://127.0.0.1:31888</value>
+</property>
+<property>
+    <name>hadoop.tmp.dir</name>
+    <value>/tmp/hadoop</value>
+</property>
+
+
+</configuration>
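For context, a test JVM can pick this override up by adding the file as a resource to a Hadoop Configuration. A minimal sketch, assuming the file is visible on the test classpath under hadoop/conf/ and the mini-DFS from this config is already listening on 127.0.0.1:31888:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class CoreSiteCheck {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Classpath location is an assumption about how the test resources are packaged.
        conf.addResource("hadoop/conf/core-site.xml");
        // fs.default.name routes FileSystem.get() calls to the test mini-DFS.
        System.out.println("default FS: " + conf.get("fs.default.name"));
        // Requires the mini-cluster to be up; otherwise this call fails to connect.
        FileSystem fs = FileSystem.get(conf);
        System.out.println("working dir: " + fs.getWorkingDirectory());
    }
}
```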
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/hdfs-site.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/hdfs-site.xml
new file mode 100644
index 0000000..8d29b1d
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/hdfs-site.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+<property>
+   <name>dfs.replication</name>
+   <value>1</value>
+</property>
+
+<property>
+   <name>dfs.block.size</name>
+   <value>65536</value>
+</property>
+
+</configuration>
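The 64 KB dfs.block.size (versus the usual 64 MB default) together with single replication makes even tiny test files span several blocks, which is what exercises multi-split reads. A quick check of the values, assuming the same classpath layout as above:

```java
import org.apache.hadoop.conf.Configuration;

public class HdfsSiteCheck {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.addResource("hadoop/conf/hdfs-site.xml"); // classpath location is an assumption
        // 65536-byte blocks make even small test inputs multi-block files.
        long blockSize = conf.getLong("dfs.block.size", 64L * 1024 * 1024);
        int replication = conf.getInt("dfs.replication", 3);
        System.out.println("block size=" + blockSize + ", replication=" + replication);
    }
}
```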
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/log4j.properties b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/log4j.properties
new file mode 100755
index 0000000..d5e6004
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/log4j.properties
@@ -0,0 +1,94 @@
+# Define some default values that can be overridden by system properties
+hadoop.root.logger=FATAL,console
+hadoop.log.dir=.
+hadoop.log.file=hadoop.log
+
+# Define the root logger based on the system property "hadoop.root.logger".
+log4j.rootLogger=${hadoop.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshold=FATAL
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Rollover at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this 
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#
+# TaskLog Appender
+#
+
+#Default values
+hadoop.tasklog.taskid=null
+hadoop.tasklog.noKeepSplits=4
+hadoop.tasklog.totalLogFileSize=100
+hadoop.tasklog.purgeLogSplits=true
+hadoop.tasklog.logsRetainHours=12
+
+log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
+log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
+log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
+
+log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
+log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+#
+# Rolling File Appender
+#
+
+#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
+#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Logfile size and 30-day backups
+#log4j.appender.RFA.MaxFileSize=1MB
+#log4j.appender.RFA.MaxBackupIndex=30
+
+#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+#
+# FSNamesystem Audit logging
+# Audit events are logged at INFO level upstream; this config raises the logger to WARN
+#
+log4j.logger.org.apache.hadoop.fs.FSNamesystem.audit=WARN
+
+# Custom Logging levels
+
+#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
+#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
+#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+
+# Jets3t library
+log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
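Because log4j.rootLogger expands ${hadoop.root.logger}, and log4j resolves such variables from JVM system properties before consulting the properties file itself, a test run can raise verbosity without editing this file. A minimal sketch (the resource path is an assumption):

```java
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;

public class LogLevelOverride {
    public static void main(String[] args) {
        // System properties win over the FATAL,console default defined in the file.
        System.setProperty("hadoop.root.logger", "DEBUG,console");
        PropertyConfigurator.configure("src/test/resources/hadoop/conf/log4j.properties");
        Logger.getLogger(LogLevelOverride.class).debug("log4j configured for tests");
    }
}
```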
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/mapred-site.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/mapred-site.xml
new file mode 100644
index 0000000..39b6505
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/hadoop/conf/mapred-site.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+
+  <property>
+    <name>mapred.job.tracker</name>
+    <value>localhost:29007</value>
+  </property>
+  <property>
+    <name>mapred.tasktracker.map.tasks.maximum</name>
+    <value>20</value>
+  </property>
+  <property>
+    <name>mapred.tasktracker.reduce.tasks.maximum</name>
+    <value>20</value>
+  </property>
+  <property>
+    <name>mapred.max.split.size</name>
+    <value>2048</value>
+  </property>
+
+</configuration>
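The 2048-byte mapred.max.split.size is deliberately tiny: it slices small test inputs into many map tasks so the 20 map and reduce slots actually run in parallel. Reading the values back through JobConf, as a sketch (classpath location again assumed):

```java
import org.apache.hadoop.mapred.JobConf;

public class MapredSiteCheck {
    public static void main(String[] args) {
        JobConf conf = new JobConf();
        conf.addResource("hadoop/conf/mapred-site.xml"); // classpath location is an assumption
        // A 2 KB cap on split size turns even a small file into many map tasks.
        long maxSplit = conf.getLong("mapred.max.split.size", Long.MAX_VALUE);
        System.out.println("job tracker=" + conf.get("mapred.job.tracker")
                + ", max split=" + maxSplit);
    }
}
```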
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/topology.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/topology.xml
new file mode 100644
index 0000000..3a0ac7e
--- /dev/null
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/resources/topology.xml
@@ -0,0 +1,32 @@
+<cluster-topology>
+	<network-switch name="all">
+		<network-switch name="rack1">
+			<terminal name="10.0.0.1" />
+			<terminal name="10.0.0.5" />
+			<terminal name="10.0.0.9" />
+			<terminal name="10.0.0.13" />
+			<terminal name="10.0.0.17" />
+		</network-switch>
+		<network-switch name="rack2">
+			<terminal name="10.0.0.2" />
+			<terminal name="10.0.0.6" />
+			<terminal name="10.0.0.10" />
+			<terminal name="10.0.0.14" />
+			<terminal name="10.0.0.18" />
+		</network-switch>
+		<network-switch name="rack3">
+			<terminal name="10.0.0.3" />
+			<terminal name="10.0.0.7" />
+			<terminal name="10.0.0.11" />
+			<terminal name="10.0.0.15" />
+			<terminal name="10.0.0.19" />
+		</network-switch>
+		<network-switch name="rack4">
+			<terminal name="10.0.0.4" />
+			<terminal name="10.0.0.8" />
+			<terminal name="10.0.0.12" />
+			<terminal name="10.0.0.16" />
+			<terminal name="10.0.0.20" />
+		</network-switch>
+	</network-switch>
+</cluster-topology>
\ No newline at end of file
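The topology nests network-switch elements with terminal leaves, here four racks of five nodes under one top-level switch. Hyracks has its own loader for this format; purely for illustration, a JDK-only sketch that tallies terminals per switch (file path assumed):

```java
import java.io.File;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

public class TopologyDump {
    public static void main(String[] args) throws Exception {
        // Path is an assumption about where the test resource lives on disk.
        Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                .parse(new File("src/test/resources/topology.xml"));
        NodeList switches = doc.getElementsByTagName("network-switch");
        for (int i = 0; i < switches.getLength(); i++) {
            Element sw = (Element) switches.item(i);
            // getElementsByTagName is recursive, so the top switch "all" reports all 20 terminals.
            int terminals = sw.getElementsByTagName("terminal").getLength();
            System.out.println(sw.getAttribute("name") + ": " + terminals + " terminals");
        }
    }
}
```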
diff --git a/hyracks/hyracks-hdfs/pom.xml b/hyracks/hyracks-hdfs/pom.xml
new file mode 100644
index 0000000..5ed76e9
--- /dev/null
+++ b/hyracks/hyracks-hdfs/pom.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hyracks-hdfs</artifactId>
+  <packaging>pom</packaging>
+  <name>hyracks-hdfs</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.2.3-SNAPSHOT</version>
+  </parent>
+
+  <modules>
+    <module>hyracks-hdfs-0.20.2</module>
+    <module>hyracks-hdfs-0.23.1</module>
+    <module>hyracks-hdfs-core</module>
+  </modules>
+</project>
diff --git a/hyracks/hyracks-ipc/pom.xml b/hyracks/hyracks-ipc/pom.xml
index cb0de08..6f5e09f 100644
--- a/hyracks/hyracks-ipc/pom.xml
+++ b/hyracks/hyracks-ipc/pom.xml
@@ -15,8 +15,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml
index 50c05da..a8fc29e 100644
--- a/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml
+++ b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml
@@ -17,8 +17,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksNCStartMojo.java b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksNCStartMojo.java
index 47de024..fc06a68 100644
--- a/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksNCStartMojo.java
+++ b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/edu/uci/ics/hyracks/maven/plugin/HyracksNCStartMojo.java
@@ -55,6 +55,7 @@
         cmdLineBuffer.append(" -data-ip-address ").append(dataIpAddress);
         cmdLineBuffer.append(" -node-id ").append(nodeId);
         cmdLineBuffer.append(" -cluster-net-ip-address 127.0.0.1");
+        cmdLineBuffer.append(" -result-ip-address 127.0.0.1");
         if (ccPort != 0) {
             cmdLineBuffer.append(" -cc-port ").append(ccPort);
         }
diff --git a/hyracks/hyracks-net/pom.xml b/hyracks/hyracks-net/pom.xml
index a079306..fb486df 100644
--- a/hyracks/hyracks-net/pom.xml
+++ b/hyracks/hyracks-net/pom.xml
@@ -15,8 +15,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/ChannelSet.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/ChannelSet.java
index dabc8a4..8fa99be 100644
--- a/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/ChannelSet.java
+++ b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/ChannelSet.java
@@ -58,6 +58,7 @@
 
     ChannelControlBlock allocateChannel() throws NetException {
         synchronized (mConn) {
+            cleanupClosedChannels();
             int idx = allocationBitmap.nextClearBit(0);
             if (idx < 0 || idx >= ccbArray.length) {
                 cleanupClosedChannels();
@@ -231,4 +232,4 @@
             ccbArray = Arrays.copyOf(ccbArray, ccbArray.length * 2);
         }
     }
-}
\ No newline at end of file
+}
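The new call reclaims closed channels before every bitmap scan rather than only after a failed scan, so freed channel IDs are reused promptly instead of forcing array growth. A minimal sketch of the bitmap-allocation pattern, with illustrative names standing in for ChannelSet's fields:

```java
import java.util.BitSet;

// Illustrative sketch of ChannelSet's allocation pattern, not the class itself.
public class SlotAllocator {
    private final BitSet allocationBitmap = new BitSet();
    private Object[] slots = new Object[16];

    public synchronized int allocate() {
        reclaimClosedSlots();                        // eager reclaim, as the patch adds
        int idx = allocationBitmap.nextClearBit(0);  // first free slot
        if (idx >= slots.length) {
            // mirror of Arrays.copyOf(ccbArray, ccbArray.length * 2)
            Object[] bigger = new Object[slots.length * 2];
            System.arraycopy(slots, 0, bigger, 0, slots.length);
            slots = bigger;
        }
        allocationBitmap.set(idx);
        return idx;
    }

    private void reclaimClosedSlots() {
        // in ChannelSet this clears bitmap bits for channels whose teardown completed
    }

    public static void main(String[] args) {
        SlotAllocator a = new SlotAllocator();
        System.out.println(a.allocate()); // 0
        System.out.println(a.allocate()); // 1
    }
}
```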
diff --git a/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemux.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemux.java
index c719bc4..e4df6b9 100644
--- a/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemux.java
+++ b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/muxdemux/MuxDemux.java
@@ -48,9 +48,9 @@
      * Constructor.
      * 
      * @param localAddress
-     *            - TCP/IP socket address to listen on
+     *            - TCP/IP socket address to listen on; null for a non-listening, unidirectional endpoint
      * @param listener
-     *            - Callback interface to report channel events
+     *            - Callback interface to report channel events; null for a non-listening, unidirectional endpoint
      * @param nThreads
      *            - Number of threads to use for data transfer
      * @param maxConnectionAttempts
diff --git a/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/TCPEndpoint.java b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/TCPEndpoint.java
index d13a17e..a9061e1 100644
--- a/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/TCPEndpoint.java
+++ b/hyracks/hyracks-net/src/main/java/edu/uci/ics/hyracks/net/protocols/tcp/TCPEndpoint.java
@@ -45,15 +45,23 @@
     }
 
     public void start(InetSocketAddress localAddress) throws IOException {
-        serverSocketChannel = ServerSocketChannel.open();
-        ServerSocket serverSocket = serverSocketChannel.socket();
-        serverSocket.bind(localAddress);
-        this.localAddress = (InetSocketAddress) serverSocket.getLocalSocketAddress();
+        // Set up a server-socket listening channel only if the TCPEndpoint is a listening endpoint.
+        if (localAddress != null) {
+            serverSocketChannel = ServerSocketChannel.open();
+            ServerSocket serverSocket = serverSocketChannel.socket();
+            serverSocket.bind(localAddress);
+            this.localAddress = (InetSocketAddress) serverSocket.getLocalSocketAddress();
+        }
+
         ioThreads = new IOThread[nThreads];
         for (int i = 0; i < ioThreads.length; ++i) {
             ioThreads[i] = new IOThread();
         }
-        ioThreads[0].registerServerSocket(serverSocketChannel);
+
+        if (localAddress != null) {
+            ioThreads[0].registerServerSocket(serverSocketChannel);
+        }
+
         for (int i = 0; i < ioThreads.length; ++i) {
             ioThreads[i].start();
         }
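With a null localAddress the endpoint now skips the server socket entirely and only runs IO threads for outbound connections. The same conditional-listen pattern in a self-contained NIO sketch (not the Hyracks class itself):

```java
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.channels.ServerSocketChannel;

public class OptionalListener {
    private ServerSocketChannel serverSocketChannel;
    private InetSocketAddress localAddress;

    public void start(InetSocketAddress bindAddress) throws IOException {
        if (bindAddress != null) {
            // Listening endpoint: bind and remember the actual local address.
            serverSocketChannel = ServerSocketChannel.open();
            serverSocketChannel.socket().bind(bindAddress);
            localAddress = (InetSocketAddress) serverSocketChannel.socket().getLocalSocketAddress();
        }
        // With bindAddress == null the endpoint can still open outbound connections.
    }

    public static void main(String[] args) throws IOException {
        new OptionalListener().start(null);                      // connect-only endpoint
        new OptionalListener().start(new InetSocketAddress(0));  // listening endpoint, ephemeral port
    }
}
```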
diff --git a/hyracks/hyracks-server/pom.xml b/hyracks/hyracks-server/pom.xml
index 2699c27..e0fc40a 100644
--- a/hyracks/hyracks-server/pom.xml
+++ b/hyracks/hyracks-server/pom.xml
@@ -15,13 +15,15 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
         <executions>
           <execution>
             <configuration>
diff --git a/hyracks/hyracks-storage-am-btree/pom.xml b/hyracks/hyracks-storage-am-btree/pom.xml
index 118d46d..f251d51 100644
--- a/hyracks/hyracks-storage-am-btree/pom.xml
+++ b/hyracks/hyracks-storage-am-btree/pom.xml
@@ -18,8 +18,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-storage-am-common/pom.xml b/hyracks/hyracks-storage-am-common/pom.xml
index 0b32733..dbc4f41d 100644
--- a/hyracks/hyracks-storage-am-common/pom.xml
+++ b/hyracks/hyracks-storage-am-common/pom.xml
@@ -18,8 +18,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-storage-am-invertedindex/pom.xml b/hyracks/hyracks-storage-am-invertedindex/pom.xml
index a647e9d..5fe2d96 100644
--- a/hyracks/hyracks-storage-am-invertedindex/pom.xml
+++ b/hyracks/hyracks-storage-am-invertedindex/pom.xml
@@ -18,8 +18,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
index d00bea6..6744f70 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
@@ -15,13 +15,13 @@
 
 package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
 
-import java.io.DataOutput;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
@@ -41,7 +41,7 @@
 
     private FrameTupleAccessor accessor;
     private ArrayTupleBuilder builder;
-    private DataOutput builderDos;
+    private GrowableArray builderFieldData;
     private FrameTupleAppender appender;
     private ByteBuffer writeBuffer;
 
@@ -60,7 +60,7 @@
         accessor = new FrameTupleAccessor(ctx.getFrameSize(), inputRecDesc);
         writeBuffer = ctx.allocateFrame();
         builder = new ArrayTupleBuilder(outputRecDesc.getFieldCount());
-        builderDos = builder.getDataOutput();
+        builderFieldData = builder.getFieldData();
         appender = new FrameTupleAppender(ctx.getFrameSize());
         appender.reset(writeBuffer, true);
         writer.open();
@@ -87,7 +87,7 @@
                     builder.reset();
                     try {
                         IToken token = tokenizer.getToken();
-                        token.serializeToken(builderDos);
+                        token.serializeToken(builderFieldData);
                         builder.addFieldEndOffset();
                     } catch (IOException e) {
                         throw new HyracksDataException(e.getMessage());
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndex.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndex.java
index 99e76dc..3525fc3 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndex.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndex.java
@@ -202,7 +202,9 @@
     public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException {
         // Create entry in btree for last inverted list.
         InvertedIndexBulkLoadContext ctx = (InvertedIndexBulkLoadContext) ictx;
-        createAndInsertBTreeTuple(ctx);
+        if (ctx.lastTuple.getFieldData(0) != null) {
+            createAndInsertBTreeTuple(ctx);
+        }
         btree.endBulkLoad(ctx.btreeBulkLoadCtx);
         ctx.deinit();
     }
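The guard matters for empty bulk loads: if no tuple was ever appended, ctx.lastTuple still has null field data, and unconditionally creating the trailing BTree entry would fail. A self-contained analogue of the idiom, with illustrative names:

```java
public class BulkLoadGuardDemo {
    static byte[] lastTupleField0;      // stays null until the first tuple is loaded

    static void load(byte[] tuple) { lastTupleField0 = tuple; }

    static void end() {
        if (lastTupleField0 != null) {  // mirrors ctx.lastTuple.getFieldData(0) != null
            System.out.println("flushing last inverted-list entry");
        }
    }

    public static void main(String[] args) {
        end();                          // empty load: nothing flushed, no failure
        load(new byte[] { 1 });
        end();                          // prints the flush message
    }
}
```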
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcher.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcher.java
index 1f886a2..af5dad3 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcher.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcher.java
@@ -15,7 +15,6 @@
 
 package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
 
-import java.io.DataOutput;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -28,6 +27,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
@@ -75,7 +75,7 @@
     protected RecordDescriptor queryTokenRecDesc = new RecordDescriptor(
             new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
     protected ArrayTupleBuilder queryTokenBuilder = new ArrayTupleBuilder(queryTokenRecDesc.getFieldCount());
-    protected DataOutput queryTokenDos = queryTokenBuilder.getDataOutput();
+    protected GrowableArray queryTokenFieldData = queryTokenBuilder.getFieldData();
     protected FrameTupleAppender queryTokenAppender;
     protected ByteBuffer queryTokenFrame;
 
@@ -158,7 +158,7 @@
             queryTokenBuilder.reset();
             try {
                 IToken token = queryTokenizer.getToken();
-                token.serializeToken(queryTokenDos);
+                token.serializeToken(queryTokenFieldData);
                 queryTokenBuilder.addFieldEndOffset();
                 // WARNING: assuming one frame is big enough to hold all tokens
                 queryTokenAppender.append(queryTokenBuilder.getFieldEndOffsets(), queryTokenBuilder.getByteArray(), 0,
@@ -294,7 +294,8 @@
         boolean advanceCursor = true;
         boolean advancePrevResult = false;
         int resultTidx = 0;
-
+        currentNumResults = 0;
+
         resultFrameTupleAcc.reset(prevCurrentBuffer);
         resultFrameTupleApp.reset(newCurrentBuffer, true);
 
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8Token.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8Token.java
index 65afa65..2f60952 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8Token.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8Token.java
@@ -22,6 +22,7 @@
 import java.io.IOException;
 
 import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
 
 public abstract class AbstractUTF8Token implements IToken {
     public static final int GOLDEN_RATIO_32 = 0x09e3779b9;
@@ -97,8 +98,8 @@
     }
 
     @Override
-    public void serializeTokenCount(DataOutput dos) throws IOException {
-        handleCountTypeTag(dos);
-        dos.writeInt(tokenCount);
+    public void serializeTokenCount(GrowableArray out) throws IOException {
+        handleCountTypeTag(out.getDataOutput());
+        out.getDataOutput().writeInt(tokenCount);
     }
 }
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramToken.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramToken.java
index b7bb828..a1a4354 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramToken.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramToken.java
@@ -19,10 +19,10 @@
 
 package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
 
-import java.io.DataOutput;
 import java.io.IOException;
 
 import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
 
 public class HashedUTF8NGramToken extends UTF8NGramToken {
     public HashedUTF8NGramToken(byte tokenTypeTag, byte countTypeTag) {
@@ -30,8 +30,8 @@
     }
 
     @Override
-    public void serializeToken(DataOutput dos) throws IOException {
-        handleTokenTypeTag(dos);
+    public void serializeToken(GrowableArray out) throws IOException {
+        handleTokenTypeTag(out.getDataOutput());
 
         int hash = GOLDEN_RATIO_32;
 
@@ -59,6 +59,6 @@
         // token count
         hash += tokenCount;
 
-        dos.writeInt(hash);
+        out.getDataOutput().writeInt(hash);
     }
 }
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordToken.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordToken.java
index 42ed053..20405c6 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordToken.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordToken.java
@@ -19,10 +19,10 @@
 
 package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
 
-import java.io.DataOutput;
 import java.io.IOException;
 
 import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
 
 public class HashedUTF8WordToken extends UTF8WordToken {
 
@@ -76,12 +76,12 @@
     }
 
     @Override
-    public void serializeToken(DataOutput dos) throws IOException {
+    public void serializeToken(GrowableArray out) throws IOException {
         if (tokenTypeTag > 0) {
-            dos.write(tokenTypeTag);
+            out.getDataOutput().write(tokenTypeTag);
         }
 
         // serialize hash value
-        dos.writeInt(hash);
+        out.getDataOutput().writeInt(hash);
     }
 }
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IToken.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IToken.java
index c1840d7..47467a1 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IToken.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IToken.java
@@ -19,9 +19,10 @@
 
 package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
 
-import java.io.DataOutput;
 import java.io.IOException;
 
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
+
 public interface IToken {
 	public byte[] getData();
 
@@ -34,7 +35,7 @@
 	public void reset(byte[] data, int start, int length, int tokenLength,
 			int tokenCount);
 
-	public void serializeToken(DataOutput dos) throws IOException;
+	public void serializeToken(GrowableArray out) throws IOException;
 
-	public void serializeTokenCount(DataOutput dos) throws IOException;
+	public void serializeTokenCount(GrowableArray out) throws IOException;
 }
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramToken.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramToken.java
index 59cadc8..8cb9818 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramToken.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramToken.java
@@ -19,10 +19,10 @@
 
 package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
 
-import java.io.DataOutput;
 import java.io.IOException;
 
 import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
 import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
 
 public class UTF8NGramToken extends AbstractUTF8Token implements INGramToken {
@@ -49,34 +49,39 @@
     }
 
     @Override
-    public void serializeToken(DataOutput dos) throws IOException {
-        handleTokenTypeTag(dos);
+    public void serializeToken(GrowableArray out) throws IOException {
+        handleTokenTypeTag(out.getDataOutput());
+        int tokenUTF8LenOff = out.getLength();
 
         // regular chars
         int numRegChars = tokenLength - numPreChars - numPostChars;
 
         // assuming pre and post char need 1-byte each in utf8
-        int tokenUTF8Len = getLowerCaseUTF8Len(numRegChars) + numPreChars + numPostChars;
+        int tokenUTF8Len = numPreChars + numPostChars;
 
-        // write utf8 length indicator
-        StringUtils.writeUTF8Len(tokenUTF8Len, dos);
+        // Write dummy UTF length which will be correctly set later.
+        out.getDataOutput().writeShort(0);
 
         // pre chars
         for (int i = 0; i < numPreChars; i++) {
-            StringUtils.writeCharAsModifiedUTF8(PRECHAR, dos);
+            StringUtils.writeCharAsModifiedUTF8(PRECHAR, out.getDataOutput());
         }
 
         int pos = start;
         for (int i = 0; i < numRegChars; i++) {
             char c = Character.toLowerCase(UTF8StringPointable.charAt(data, pos));
-            StringUtils.writeCharAsModifiedUTF8(c, dos);
+            tokenUTF8Len += StringUtils.writeCharAsModifiedUTF8(c, out.getDataOutput());
             pos += UTF8StringPointable.charSize(data, pos);
         }
 
         // post chars
         for (int i = 0; i < numPostChars; i++) {
-            StringUtils.writeCharAsModifiedUTF8(POSTCHAR, dos);
+            StringUtils.writeCharAsModifiedUTF8(POSTCHAR, out.getDataOutput());
         }
+
+        // Set UTF length of token.
+        out.getByteArray()[tokenUTF8LenOff] = (byte) ((tokenUTF8Len >>> 8) & 0xFF);
+        out.getByteArray()[tokenUTF8LenOff + 1] = (byte) ((tokenUTF8Len >>> 0) & 0xFF);
     }
 
     public void setNumPrePostChars(int numPreChars, int numPostChars) {
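This rewrite exists because lowercasing can change a character's UTF-8 byte length, so the token length can no longer be precomputed from the raw input. Instead a two-byte placeholder is written, the real length is accumulated from writeCharAsModifiedUTF8's return value, and the placeholder is backpatched big-endian. The same pattern with JDK-only types (the real code patches GrowableArray's backing array in place rather than a copy):

```java
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class LengthBackpatchDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);

        int lenOff = out.size();   // like out.getLength() in the patch
        out.writeShort(0);         // dummy UTF length, fixed up below

        int utf8Len = 0;
        for (char c : "CAR".toCharArray()) {
            char lower = Character.toLowerCase(c);
            out.writeByte(lower);  // 1-byte case; the real code uses writeCharAsModifiedUTF8
            utf8Len += 1;
        }

        byte[] buf = bytes.toByteArray();
        // Backpatch the length big-endian, matching the two shift-and-mask lines above.
        buf[lenOff] = (byte) ((utf8Len >>> 8) & 0xFF);
        buf[lenOff + 1] = (byte) (utf8Len & 0xFF);
        System.out.println("token bytes=" + buf.length + ", utf8Len=" + utf8Len);
    }
}
```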
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordToken.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordToken.java
index 97a1e12..9d7fe7c 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordToken.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordToken.java
@@ -19,10 +19,10 @@
 
 package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
 
-import java.io.DataOutput;
 import java.io.IOException;
 
 import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
 import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
 
 public class UTF8WordToken extends AbstractUTF8Token {
@@ -32,16 +32,20 @@
     }
 
     @Override
-    public void serializeToken(DataOutput dos) throws IOException {
-        handleTokenTypeTag(dos);
-
-        int tokenUTF8Len = getLowerCaseUTF8Len(tokenLength);
-        StringUtils.writeUTF8Len(tokenUTF8Len, dos);
+    public void serializeToken(GrowableArray out) throws IOException {
+        handleTokenTypeTag(out.getDataOutput());
+        int tokenUTF8LenOff = out.getLength();
+        int tokenUTF8Len = 0;
+        // Write dummy UTF length which will be correctly set later.
+        out.getDataOutput().writeShort(0);
         int pos = start;
         for (int i = 0; i < tokenLength; i++) {
             char c = Character.toLowerCase(UTF8StringPointable.charAt(data, pos));
-            StringUtils.writeCharAsModifiedUTF8(c, dos);
+            tokenUTF8Len += StringUtils.writeCharAsModifiedUTF8(c, out.getDataOutput());
             pos += UTF8StringPointable.charSize(data, pos);
         }
+        // Set UTF length of token.
+        out.getByteArray()[tokenUTF8LenOff] = (byte) ((tokenUTF8Len >>> 8) & 0xFF);
+        out.getByteArray()[tokenUTF8LenOff + 1] = (byte) ((tokenUTF8Len >>> 0) & 0xFF);
     }
 }
diff --git a/hyracks/hyracks-storage-am-rtree/pom.xml b/hyracks/hyracks-storage-am-rtree/pom.xml
index 61620ec..6c2d734 100644
--- a/hyracks/hyracks-storage-am-rtree/pom.xml
+++ b/hyracks/hyracks-storage-am-rtree/pom.xml
@@ -18,8 +18,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
index 9818dce..d9b7b97 100644
--- a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
@@ -40,9 +40,10 @@
             IStorageManagerInterface storageManager, IIndexRegistryProvider<IIndex> indexRegistryProvider,
             IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
             IBinaryComparatorFactory[] comparatorFactories, int[] keyFields,
-            IIndexDataflowHelperFactory dataflowHelperFactory, boolean retainInput, IOperationCallbackProvider opCallbackProvider) {
+            IIndexDataflowHelperFactory dataflowHelperFactory, boolean retainInput,
+            IOperationCallbackProvider opCallbackProvider) {
         super(spec, 1, 1, recDesc, storageManager, indexRegistryProvider, fileSplitProvider, typeTraits,
-                comparatorFactories, dataflowHelperFactory, null, false, opCallbackProvider);
+                comparatorFactories, dataflowHelperFactory, null, retainInput, opCallbackProvider);
         this.keyFields = keyFields;
     }
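Before this fix the constructor accepted retainInput but passed a hardcoded false to the superclass, so the caller's setting was silently dropped. The shape of the bug and the fix, reduced to a self-contained sketch with illustrative names:

```java
public class RetainInputFix {
    static class Base {
        final boolean retainInput;
        Base(boolean retainInput) { this.retainInput = retainInput; }
    }

    // Before the patch: the caller's flag was silently replaced with false.
    static class Before extends Base {
        Before(boolean retainInput) { super(false); }
    }

    // After the patch: the flag is forwarded to the superclass.
    static class After extends Base {
        After(boolean retainInput) { super(retainInput); }
    }

    public static void main(String[] args) {
        System.out.println(new Before(true).retainInput); // false: the bug
        System.out.println(new After(true).retainInput);  // true: the fix
    }
}
```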
 
diff --git a/hyracks/hyracks-storage-common/pom.xml b/hyracks/hyracks-storage-common/pom.xml
index 289171a..3360097 100644
--- a/hyracks/hyracks-storage-common/pom.xml
+++ b/hyracks/hyracks-storage-common/pom.xml
@@ -18,8 +18,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-test-support/pom.xml b/hyracks/hyracks-test-support/pom.xml
index 25a5378..89233c9 100644
--- a/hyracks/hyracks-test-support/pom.xml
+++ b/hyracks/hyracks-test-support/pom.xml
@@ -18,8 +18,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestTaskContext.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestTaskContext.java
index c122b25..0ca93b2 100644
--- a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestTaskContext.java
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestTaskContext.java
@@ -20,6 +20,7 @@
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
 import edu.uci.ics.hyracks.api.dataflow.state.IStateObject;
+import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.api.exceptions.HyracksException;
 import edu.uci.ics.hyracks.api.io.FileReference;
@@ -101,6 +102,11 @@
     }
 
     @Override
+    public IDatasetPartitionManager getDatasetPartitionManager() {
+        return null;
+    }
+
+    @Override
     public void sendApplicationMessageToCC(byte[] message, String nodeId) throws Exception {
         // TODO Auto-generated method stub
 
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
index 7b03a71..d0bb883 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
@@ -18,8 +18,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml
index 2cf6ce2..59c8c46 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml
@@ -18,9 +18,10 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
           <encoding>UTF-8</encoding>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/NGramTokenizerTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/NGramTokenizerTest.java
index 5f15a91..3fb6407 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/NGramTokenizerTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/NGramTokenizerTest.java
@@ -33,6 +33,7 @@
 import org.junit.Before;
 import org.junit.Test;
 
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.AbstractUTF8Token;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.HashedUTF8NGramTokenFactory;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IToken;
@@ -41,207 +42,196 @@
 
 public class NGramTokenizerTest {
 
-	private char PRECHAR = '#';
-	private char POSTCHAR = '$';
+    private char PRECHAR = '#';
+    private char POSTCHAR = '$';
 
-	private String str = "Jürgen S. Generic's Car";
-	private byte[] inputBuffer;
+    private String str = "Jürgen S. Generic's Car";
+    private byte[] inputBuffer;
 
-	private int gramLength = 3;
+    private int gramLength = 3;
 
-	private void getExpectedGrams(String s, int gramLength,
-			ArrayList<String> grams, boolean prePost) {
+    private void getExpectedGrams(String s, int gramLength, ArrayList<String> grams, boolean prePost) {
 
-		String tmp = s.toLowerCase();
-		if (prePost) {
-			StringBuilder preBuilder = new StringBuilder();
-			for (int i = 0; i < gramLength - 1; i++) {
-				preBuilder.append(PRECHAR);
-			}
-			String pre = preBuilder.toString();
+        String tmp = s.toLowerCase();
+        if (prePost) {
+            StringBuilder preBuilder = new StringBuilder();
+            for (int i = 0; i < gramLength - 1; i++) {
+                preBuilder.append(PRECHAR);
+            }
+            String pre = preBuilder.toString();
 
-			StringBuilder postBuilder = new StringBuilder();
-			for (int i = 0; i < gramLength - 1; i++) {
-				postBuilder.append(POSTCHAR);
-			}
-			String post = postBuilder.toString();
+            StringBuilder postBuilder = new StringBuilder();
+            for (int i = 0; i < gramLength - 1; i++) {
+                postBuilder.append(POSTCHAR);
+            }
+            String post = postBuilder.toString();
 
-			tmp = pre + s.toLowerCase() + post;
-		}
+            tmp = pre + s.toLowerCase() + post;
+        }
 
-		for (int i = 0; i < tmp.length() - gramLength + 1; i++) {
-			String gram = tmp.substring(i, i + gramLength);
-			grams.add(gram);
-		}
-	}
+        for (int i = 0; i < tmp.length() - gramLength + 1; i++) {
+            String gram = tmp.substring(i, i + gramLength);
+            grams.add(gram);
+        }
+    }
 
-	@Before
-	public void init() throws Exception {
-		// serialize string into bytes
-		ByteArrayOutputStream baos = new ByteArrayOutputStream();
-		DataOutput dos = new DataOutputStream(baos);
-		dos.writeUTF(str);
-		inputBuffer = baos.toByteArray();
-	}
+    @Before
+    public void init() throws Exception {
+        // serialize string into bytes
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        DataOutput dos = new DataOutputStream(baos);
+        dos.writeUTF(str);
+        inputBuffer = baos.toByteArray();
+    }
 
-	void runTestNGramTokenizerWithCountedHashedUTF8Tokens(boolean prePost)
-			throws IOException {
-		HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
-		NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(
-				gramLength, prePost, false, false, tokenFactory);
-		tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+    void runTestNGramTokenizerWithCountedHashedUTF8Tokens(boolean prePost) throws IOException {
+        HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
+        NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, false,
+                false, tokenFactory);
+        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
 
-		ArrayList<String> expectedGrams = new ArrayList<String>();
-		getExpectedGrams(str, gramLength, expectedGrams, prePost);
-		ArrayList<Integer> expectedHashedGrams = new ArrayList<Integer>();
-		HashMap<String, Integer> gramCounts = new HashMap<String, Integer>();
-		for (String s : expectedGrams) {
-			Integer count = gramCounts.get(s);
-			if (count == null) {
-				count = 1;
-				gramCounts.put(s, count);
-			} else {
-				count++;
-			}
+        ArrayList<String> expectedGrams = new ArrayList<String>();
+        getExpectedGrams(str, gramLength, expectedGrams, prePost);
+        ArrayList<Integer> expectedHashedGrams = new ArrayList<Integer>();
+        HashMap<String, Integer> gramCounts = new HashMap<String, Integer>();
+        for (String s : expectedGrams) {
+            Integer count = gramCounts.get(s);
+            if (count == null) {
+                count = 1;
+                gramCounts.put(s, count);
+            } else {
+                count++;
+            }
 
-			int hash = tokenHash(s, count);
-			expectedHashedGrams.add(hash);
-		}
+            int hash = tokenHash(s, count);
+            expectedHashedGrams.add(hash);
+        }
 
-		int tokenCount = 0;
+        int tokenCount = 0;
 
-		while (tokenizer.hasNext()) {
-			tokenizer.next();
+        while (tokenizer.hasNext()) {
+            tokenizer.next();
 
-			// serialize hashed token
-			ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
-			DataOutput tokenDos = new DataOutputStream(tokenBaos);
+            // serialize hashed token
+            GrowableArray tokenStorage = new GrowableArray();
 
-			IToken token = tokenizer.getToken();
-			token.serializeToken(tokenDos);
+            IToken token = tokenizer.getToken();
+            token.serializeToken(tokenStorage);
 
-			// deserialize token
-			ByteArrayInputStream bais = new ByteArrayInputStream(
-					tokenBaos.toByteArray());
-			DataInput in = new DataInputStream(bais);
+            // deserialize token
+            ByteArrayInputStream bais = new ByteArrayInputStream(tokenStorage.getByteArray());
+            DataInput in = new DataInputStream(bais);
 
-			Integer hashedGram = in.readInt();
+            Integer hashedGram = in.readInt();
 
-			// System.out.println(hashedGram);
+            // System.out.println(hashedGram);
 
-			Assert.assertEquals(expectedHashedGrams.get(tokenCount), hashedGram);
+            Assert.assertEquals(expectedHashedGrams.get(tokenCount), hashedGram);
 
-			tokenCount++;
-		}
-		// System.out.println("---------");
-	}
+            tokenCount++;
+        }
+        // System.out.println("---------");
+    }
 
-	void runTestNGramTokenizerWithHashedUTF8Tokens(boolean prePost)
-			throws IOException {
-		HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
-		NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(
-				gramLength, prePost, true, false, tokenFactory);
-		tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+    void runTestNGramTokenizerWithHashedUTF8Tokens(boolean prePost) throws IOException {
+        HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
+        NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false,
+                tokenFactory);
+        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
 
-		ArrayList<String> expectedGrams = new ArrayList<String>();
-		getExpectedGrams(str, gramLength, expectedGrams, prePost);
-		ArrayList<Integer> expectedHashedGrams = new ArrayList<Integer>();
-		for (String s : expectedGrams) {
-			int hash = tokenHash(s, 1);
-			expectedHashedGrams.add(hash);
-		}
+        ArrayList<String> expectedGrams = new ArrayList<String>();
+        getExpectedGrams(str, gramLength, expectedGrams, prePost);
+        ArrayList<Integer> expectedHashedGrams = new ArrayList<Integer>();
+        for (String s : expectedGrams) {
+            int hash = tokenHash(s, 1);
+            expectedHashedGrams.add(hash);
+        }
 
-		int tokenCount = 0;
+        int tokenCount = 0;
 
-		while (tokenizer.hasNext()) {
-			tokenizer.next();
+        while (tokenizer.hasNext()) {
+            tokenizer.next();
 
-			// serialize hashed token
-			ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
-			DataOutput tokenDos = new DataOutputStream(tokenBaos);
+            // serialize hashed token
+            GrowableArray tokenStorage = new GrowableArray();
 
-			IToken token = tokenizer.getToken();
-			token.serializeToken(tokenDos);
+            IToken token = tokenizer.getToken();
+            token.serializeToken(tokenStorage);
 
-			// deserialize token
-			ByteArrayInputStream bais = new ByteArrayInputStream(
-					tokenBaos.toByteArray());
-			DataInput in = new DataInputStream(bais);
+            // deserialize token
+            ByteArrayInputStream bais = new ByteArrayInputStream(tokenStorage.getByteArray());
+            DataInput in = new DataInputStream(bais);
 
-			Integer hashedGram = in.readInt();
+            Integer hashedGram = in.readInt();
 
-			// System.out.println(hashedGram);
+            // System.out.println(hashedGram);
 
-			Assert.assertEquals(expectedHashedGrams.get(tokenCount), hashedGram);
+            Assert.assertEquals(expectedHashedGrams.get(tokenCount), hashedGram);
 
-			tokenCount++;
-		}
-		// System.out.println("---------");
-	}
+            tokenCount++;
+        }
+        // System.out.println("---------");
+    }
 
-	void runTestNGramTokenizerWithUTF8Tokens(boolean prePost)
-			throws IOException {
-		UTF8NGramTokenFactory tokenFactory = new UTF8NGramTokenFactory();
-		NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(
-				gramLength, prePost, true, false, tokenFactory);
-		tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+    void runTestNGramTokenizerWithUTF8Tokens(boolean prePost) throws IOException {
+        UTF8NGramTokenFactory tokenFactory = new UTF8NGramTokenFactory();
+        NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false,
+                tokenFactory);
+        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
 
-		ArrayList<String> expectedGrams = new ArrayList<String>();
-		getExpectedGrams(str, gramLength, expectedGrams, prePost);
+        ArrayList<String> expectedGrams = new ArrayList<String>();
+        getExpectedGrams(str, gramLength, expectedGrams, prePost);
 
-		int tokenCount = 0;
+        int tokenCount = 0;
 
-		while (tokenizer.hasNext()) {
-			tokenizer.next();
+        while (tokenizer.hasNext()) {
+            tokenizer.next();
 
-			// serialize hashed token
-			ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
-			DataOutput tokenDos = new DataOutputStream(tokenBaos);
+            // serialize hashed token
+            GrowableArray tokenStorage = new GrowableArray();
 
-			IToken token = tokenizer.getToken();
-			token.serializeToken(tokenDos);
+            IToken token = tokenizer.getToken();
+            token.serializeToken(tokenStorage);
 
-			// deserialize token
-			ByteArrayInputStream bais = new ByteArrayInputStream(
-					tokenBaos.toByteArray());
-			DataInput in = new DataInputStream(bais);
+            // deserialize token
+            ByteArrayInputStream bais = new ByteArrayInputStream(tokenStorage.getByteArray());
+            DataInput in = new DataInputStream(bais);
 
-			String strGram = in.readUTF();
+            String strGram = in.readUTF();
 
-			// System.out.println("\"" + strGram + "\"");
+            // System.out.println("\"" + strGram + "\"");
 
-			Assert.assertEquals(expectedGrams.get(tokenCount), strGram);
+            Assert.assertEquals(expectedGrams.get(tokenCount), strGram);
 
-			tokenCount++;
-		}
-		// System.out.println("---------");
-	}
+            tokenCount++;
+        }
+        // System.out.println("---------");
+    }
 
-	@Test
-	public void testNGramTokenizerWithCountedHashedUTF8Tokens()
-			throws Exception {
-		runTestNGramTokenizerWithCountedHashedUTF8Tokens(false);
-		runTestNGramTokenizerWithCountedHashedUTF8Tokens(true);
-	}
+    @Test
+    public void testNGramTokenizerWithCountedHashedUTF8Tokens() throws Exception {
+        runTestNGramTokenizerWithCountedHashedUTF8Tokens(false);
+        runTestNGramTokenizerWithCountedHashedUTF8Tokens(true);
+    }
 
-	@Test
-	public void testNGramTokenizerWithHashedUTF8Tokens() throws Exception {
-		runTestNGramTokenizerWithHashedUTF8Tokens(false);
-		runTestNGramTokenizerWithHashedUTF8Tokens(true);
-	}
+    @Test
+    public void testNGramTokenizerWithHashedUTF8Tokens() throws Exception {
+        runTestNGramTokenizerWithHashedUTF8Tokens(false);
+        runTestNGramTokenizerWithHashedUTF8Tokens(true);
+    }
 
-	@Test
-	public void testNGramTokenizerWithUTF8Tokens() throws IOException {
-		runTestNGramTokenizerWithUTF8Tokens(false);
-		runTestNGramTokenizerWithUTF8Tokens(true);
-	}
+    @Test
+    public void testNGramTokenizerWithUTF8Tokens() throws IOException {
+        runTestNGramTokenizerWithUTF8Tokens(false);
+        runTestNGramTokenizerWithUTF8Tokens(true);
+    }
 
-	public int tokenHash(String token, int tokenCount) {
-		int h = AbstractUTF8Token.GOLDEN_RATIO_32;
-		for (int i = 0; i < token.length(); i++) {
-			h ^= token.charAt(i);
-			h *= AbstractUTF8Token.GOLDEN_RATIO_32;
-		}
-		return h + tokenCount;
-	}
+    public int tokenHash(String token, int tokenCount) {
+        int h = AbstractUTF8Token.GOLDEN_RATIO_32;
+        for (int i = 0; i < token.length(); i++) {
+            h ^= token.charAt(i);
+            h *= AbstractUTF8Token.GOLDEN_RATIO_32;
+        }
+        return h + tokenCount;
+    }
 }
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
index c3c9b99..47a068b 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
@@ -27,6 +27,7 @@
 
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
@@ -112,20 +113,19 @@
 	}
 
 	private class TokenIdPair implements Comparable<TokenIdPair> {
-		public ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
-		public DataOutputStream dos = new DataOutputStream(baaos);
+		public final GrowableArray tokenStorage = new GrowableArray();
 		public int id;
 
 		TokenIdPair(IToken token, int id) throws IOException {
-			token.serializeToken(dos);
+			token.serializeToken(tokenStorage);
 			this.id = id;
 		}
 
 		@Override
 		public int compareTo(TokenIdPair o) {
-			int cmp = btreeBinCmps[0].compare(baaos.getByteArray(), 0,
-					baaos.getByteArray().length, o.baaos.getByteArray(), 0,
-					o.baaos.getByteArray().length);
+			int cmp = btreeBinCmps[0].compare(tokenStorage.getByteArray(), 0,
+					tokenStorage.getByteArray().length, o.tokenStorage.getByteArray(), 0,
+					o.tokenStorage.getByteArray().length);
 			if (cmp == 0) {
 				return id - o.id;
 			} else {
@@ -157,8 +157,8 @@
 
 		for (TokenIdPair t : pairs) {
 			tb.reset();
-			tb.addField(t.baaos.getByteArray(), 0,
-					t.baaos.getByteArray().length);
+			tb.addField(t.tokenStorage.getByteArray(), 0,
+					t.tokenStorage.getByteArray().length);
 			IntegerSerializerDeserializer.INSTANCE.serialize(t.id, tb.getDataOutput());
 			tb.addFieldEndOffset();
 			tuple.reset(tb.getFieldEndOffsets(), tb.getByteArray());
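
The TokenIdPair change above collapses the ByteArrayAccessibleOutputStream/DataOutputStream pair into a single GrowableArray. A minimal sketch of that pattern, with a plain java.io stand-in for GrowableArray (its getDataOutput/getByteArray/getLength surface here is an assumption inferred from how the diff uses it):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Stand-in for GrowableArray: one object owns both the growable buffer
// and the DataOutput view onto it, so callers such as
// IToken.serializeToken(...) need only a single handle.
final class GrowableArraySketch {
    private final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    private final DataOutputStream dos = new DataOutputStream(baos);

    DataOutputStream getDataOutput() { return dos; }
    byte[] getByteArray() { return baos.toByteArray(); }
    int getLength() { return baos.size(); }

    public static void main(String[] args) throws IOException {
        GrowableArraySketch storage = new GrowableArraySketch();
        storage.getDataOutput().writeUTF("token");
        // A binary comparator like btreeBinCmps[0] would now be handed
        // storage.getByteArray() together with an explicit length.
        System.out.println(storage.getLength() + " bytes serialized");
    }
}
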
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/WordTokenizerTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/WordTokenizerTest.java
index 53fb96d..810c5f5 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/WordTokenizerTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/WordTokenizerTest.java
@@ -34,6 +34,7 @@
 import org.junit.Before;
 import org.junit.Test;
 
+import edu.uci.ics.hyracks.data.std.util.GrowableArray;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.AbstractUTF8Token;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizer;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.HashedUTF8WordTokenFactory;
@@ -127,14 +128,13 @@
             tokenizer.next();
 
             // serialize token
-            ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
-            DataOutput tokenDos = new DataOutputStream(tokenBaos);
+            GrowableArray tokenStorage = new GrowableArray();
 
             IToken token = tokenizer.getToken();
-            token.serializeToken(tokenDos);
+            token.serializeToken(tokenStorage);
 
             // deserialize token
-            ByteArrayInputStream bais = new ByteArrayInputStream(tokenBaos.toByteArray());
+            ByteArrayInputStream bais = new ByteArrayInputStream(tokenStorage.getByteArray());
             DataInput in = new DataInputStream(bais);
 
             Integer hashedToken = in.readInt();
@@ -159,14 +159,13 @@
             tokenizer.next();
 
             // serialize token
-            ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
-            DataOutput tokenDos = new DataOutputStream(tokenBaos);
+            GrowableArray tokenStorage = new GrowableArray();
 
             IToken token = tokenizer.getToken();
-            token.serializeToken(tokenDos);
+            token.serializeToken(tokenStorage);
 
             // deserialize token
-            ByteArrayInputStream bais = new ByteArrayInputStream(tokenBaos.toByteArray());
+            ByteArrayInputStream bais = new ByteArrayInputStream(tokenStorage.getByteArray());
             DataInput in = new DataInputStream(bais);
 
             Integer hashedToken = in.readInt();
@@ -191,14 +190,13 @@
             tokenizer.next();
 
             // serialize hashed token
-            ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
-            DataOutput tokenDos = new DataOutputStream(tokenBaos);
+            GrowableArray tokenStorage = new GrowableArray();
 
             IToken token = tokenizer.getToken();
-            token.serializeToken(tokenDos);
+            token.serializeToken(tokenStorage);
 
             // deserialize token
-            ByteArrayInputStream bais = new ByteArrayInputStream(tokenBaos.toByteArray());
+            ByteArrayInputStream bais = new ByteArrayInputStream(tokenStorage.getByteArray());
             DataInput in = new DataInputStream(bais);
 
             String strToken = in.readUTF();
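
All three hunks above follow the same pattern: serialize the current token into fresh storage, then read it back through a DataInputStream and compare against the expected value. A self-contained sketch of that round trip, using plain java.io types in place of IToken and GrowableArray:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Sketch of the serialize/deserialize round trip the tests perform.
public class TokenRoundTripSketch {
    public static void main(String[] args) throws IOException {
        // "serialize token": a word token writes itself as modified UTF-8
        ByteArrayOutputStream storage = new ByteArrayOutputStream();
        new DataOutputStream(storage).writeUTF("database");

        // "deserialize token": read it back the same way the tests do
        DataInput in = new DataInputStream(
                new ByteArrayInputStream(storage.toByteArray()));
        String strToken = in.readUTF();
        System.out.println(strToken);   // database
    }
}
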
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
index ea86042..7b1a3f3 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
@@ -18,8 +18,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
index bd10e13..8e429f9 100644
--- a/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
@@ -18,8 +18,9 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/hyracks-yarn/hyracks-yarn-am/pom.xml b/hyracks/hyracks-yarn/hyracks-yarn-am/pom.xml
index 9e453a6..d33ddc5 100644
--- a/hyracks/hyracks-yarn/hyracks-yarn-am/pom.xml
+++ b/hyracks/hyracks-yarn/hyracks-yarn-am/pom.xml
@@ -14,8 +14,8 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
         </configuration>
       </plugin>
       <plugin>
diff --git a/hyracks/hyracks-yarn/hyracks-yarn-client/pom.xml b/hyracks/hyracks-yarn/hyracks-yarn-client/pom.xml
index 08935a7..649aa6c 100644
--- a/hyracks/hyracks-yarn/hyracks-yarn-client/pom.xml
+++ b/hyracks/hyracks-yarn/hyracks-yarn-client/pom.xml
@@ -14,8 +14,8 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
         </configuration>
       </plugin>
       <plugin>
diff --git a/hyracks/hyracks-yarn/hyracks-yarn-common/pom.xml b/hyracks/hyracks-yarn/hyracks-yarn-common/pom.xml
index 3aaf4a2..fe210fd 100644
--- a/hyracks/hyracks-yarn/hyracks-yarn-common/pom.xml
+++ b/hyracks/hyracks-yarn/hyracks-yarn-common/pom.xml
@@ -14,8 +14,8 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.0.2</version>
         <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
+          <source>1.7</source>
+          <target>1.7</target>
         </configuration>
       </plugin>
     </plugins>
diff --git a/hyracks/pom.xml b/hyracks/pom.xml
index 570421e..b699542 100644
--- a/hyracks/pom.xml
+++ b/hyracks/pom.xml
@@ -1,4 +1,4 @@
-
+<?xml version="1.0" encoding="UTF-8"?>
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks</groupId>
@@ -8,24 +8,9 @@
   <name>hyracks</name>
 
   <properties>
-    <jvm.extraargs />
+    <jvm.extraargs/>
   </properties>
 
-  <profiles>
-    <profile>
-      <id>macosx</id>
-      <activation>
-        <os>
-          <name>mac os x</name>
-        </os>
-        <jdk>1.7</jdk>
-      </activation>
-      <properties>
-        <jvm.extraargs>-Djava.nio.channels.spi.SelectorProvider=sun.nio.ch.KQueueSelectorProvider</jvm.extraargs>
-      </properties>
-    </profile>
-  </profiles>
-
   <build>
     <plugins>
       <plugin>
@@ -44,6 +29,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
+        <version>2.13</version>
         <configuration>
             <forkMode>pertest</forkMode>
             <argLine>-enableassertions -Djava.util.logging.config.file=${user.home}/logging.properties -Xdebug -Xrunjdwp:transport=dt_socket,server=y,address=8000,suspend=n ${jvm.extraargs}</argLine>
@@ -57,6 +43,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-changelog-plugin</artifactId>
+        <version>2.2</version>
       </plugin>
     </plugins>
   </reporting>
@@ -96,6 +83,8 @@
   <modules>
     <module>hyracks-ipc</module>
     <module>hyracks-api</module>
+    <module>hyracks-comm</module>
+    <module>hyracks-client</module>
     <module>hyracks-dataflow-common</module>
     <module>hyracks-dataflow-std</module>
     <module>hyracks-dataflow-hadoop</module>
@@ -116,5 +105,7 @@
     <module>hyracks-hadoop-compat</module>
     <!--module>hyracks-yarn</module-->
     <module>hyracks-maven-plugins</module>
+    <module>hyracks-hdfs</module>
+    <module>hyracks-dist</module>
   </modules>
 </project>
diff --git a/pom.xml b/pom.xml
index f9ec81c..e4920b3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -67,5 +67,6 @@
     <module>hyracks</module>
     <module>algebricks</module>
     <module>pregelix</module>
+    <module>hivesterix</module>
   </modules>
 </project>
diff --git a/pregelix/pom.xml b/pregelix/pom.xml
index 4b6a968..7d08fb7 100644
--- a/pregelix/pom.xml
+++ b/pregelix/pom.xml
@@ -44,6 +44,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
+        <version>2.13</version>
         <configuration>
             <forkMode>pertest</forkMode>
             <argLine>-enableassertions -Djava.util.logging.config.file=${user.home}/logging.properties -Xdebug -Xrunjdwp:transport=dt_socket,server=y,address=8000,suspend=n ${jvm.extraargs}</argLine>
@@ -57,6 +58,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-changelog-plugin</artifactId>
+        <version>2.2</version>
       </plugin>
     </plugins>
   </reporting>
diff --git a/pregelix/pregelix-api/pom.xml b/pregelix/pregelix-api/pom.xml
index 66a0186..8212e1c 100644
--- a/pregelix/pregelix-api/pom.xml
+++ b/pregelix/pregelix-api/pom.xml
@@ -21,8 +21,9 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
+					<fork>true</fork>
 				</configuration>
 			</plugin>
 			<plugin>
@@ -41,6 +42,7 @@
 			</plugin>
 			<plugin>
 				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
 				<configuration>
 					<filesets>
 						<fileset>
@@ -76,11 +78,11 @@
 			<scope>test</scope>
 		</dependency>
 		<dependency>
-			<groupId>org.apache.hadoop</groupId>
-			<artifactId>hadoop-core</artifactId>
-			<version>0.20.2</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-hdfs-core</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
 	</dependencies>
 </project>
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/Edge.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/Edge.java
index 4af35fe..e5f42fe 100644
--- a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/Edge.java
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/Edge.java
@@ -42,7 +42,7 @@
     private E edgeValue = null;
     /** Configuration - Used to instantiate classes */
     private Configuration conf = null;
-    /** Whether the edgeValue field is not null*/
+    /** Whether the edgeValue field is not null */
     private boolean hasEdgeValue = false;
 
     /**
@@ -115,8 +115,9 @@
         destVertexId.readFields(input);
         hasEdgeValue = input.readBoolean();
         if (hasEdgeValue) {
-            if (edgeValue == null)
+            if (edgeValue == null) {
                 edgeValue = (E) BspUtils.createEdgeValue(getConf());
+            }
             edgeValue.readFields(input);
         }
     }
@@ -128,8 +129,9 @@
         }
         destVertexId.write(output);
         output.writeBoolean(hasEdgeValue);
-        if (hasEdgeValue)
+        if (hasEdgeValue) {
             edgeValue.write(output);
+        }
     }
 
     @Override
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/MsgList.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/MsgList.java
index 734b1af..8d3d4c6 100644
--- a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/MsgList.java
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/MsgList.java
@@ -29,7 +29,7 @@
  */
 public class MsgList<M extends Writable> extends ArrayListWritable<M> {
     /** Defining a layout version for a serializable class. */
-    private static final long serialVersionUID = 100L;
+    private static final long serialVersionUID = 1L;
 
     /**
      * Default constructor.
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/Vertex.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/Vertex.java
index 6856e9a..cd49184 100644
--- a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/Vertex.java
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/Vertex.java
@@ -26,7 +26,7 @@
 
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
 import edu.uci.ics.hyracks.api.comm.IFrameWriter;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -49,384 +49,525 @@
  */
 @SuppressWarnings("rawtypes")
 public abstract class Vertex<I extends WritableComparable, V extends Writable, E extends Writable, M extends Writable>
-        implements Writable {
-    private static long superstep = 0;
-    /** Class-wide number of vertices */
-    private static long numVertices = -1;
-    /** Class-wide number of edges */
-    private static long numEdges = -1;
-    /** Vertex id */
-    private I vertexId = null;
-    /** Vertex value */
-    private V vertexValue = null;
-    /** Map of destination vertices and their edge values */
-    private final List<Edge<I, E>> destEdgeList = new ArrayList<Edge<I, E>>();
-    /** If true, do not do anymore computation on this vertex. */
-    boolean halt = false;
-    /** List of incoming messages from the previous superstep */
-    private final List<M> msgList = new ArrayList<M>();
-    /** map context */
-    private static Mapper.Context context = null;
-    /** a delegate for hyracks stuff */
-    private VertexDelegate<I, V, E, M> delegate = new VertexDelegate<I, V, E, M>(this);
-    /** this vertex is updated or not */
-    private boolean updated = false;
-    /** has outgoing messages */
-    private boolean hasMessage = false;
+		implements Writable {
+	private static long superstep = 0;
+	/** Class-wide number of vertices */
+	private static long numVertices = -1;
+	/** Class-wide number of edges */
+	private static long numEdges = -1;
+	/** Vertex id */
+	private I vertexId = null;
+	/** Vertex value */
+	private V vertexValue = null;
+	/** Map of destination vertices and their edge values */
+	private final List<Edge<I, E>> destEdgeList = new ArrayList<Edge<I, E>>();
+	/** If true, do not do anymore computation on this vertex. */
+	boolean halt = false;
+	/** List of incoming messages from the previous superstep */
+	private final List<M> msgList = new ArrayList<M>();
+	/** map context */
+	private static TaskAttemptContext context = null;
+	/** a delegate for hyracks stuff */
+	private VertexDelegate<I, V, E, M> delegate = new VertexDelegate<I, V, E, M>(
+			this);
+	/** this vertex is updated or not */
+	private boolean updated = false;
+	/** has outgoing messages */
+	private boolean hasMessage = false;
+	/** created new vertex */
+	private boolean createdNewLiveVertex = false;
 
-    /**
-     * use object pool for re-using objects
-     */
-    private List<Edge<I, E>> edgePool = new ArrayList<Edge<I, E>>();
-    private List<M> msgPool = new ArrayList<M>();
-    private List<V> valuePool = new ArrayList<V>();
-    private int usedEdge = 0;
-    private int usedMessage = 0;
-    private int usedValue = 0;
+	/**
+	 * use object pool for re-using objects
+	 */
+	private List<Edge<I, E>> edgePool = new ArrayList<Edge<I, E>>();
+	private List<M> msgPool = new ArrayList<M>();
+	private List<V> valuePool = new ArrayList<V>();
+	private int usedEdge = 0;
+	private int usedMessage = 0;
+	private int usedValue = 0;
 
-    /**
-     * The key method that users need to implement
-     * 
-     * @param msgIterator
-     *            an iterator of incoming messages
-     */
-    public abstract void compute(Iterator<M> msgIterator);
+	/**
+	 * The key method that users need to implement
+	 * 
+	 * @param msgIterator
+	 *            an iterator of incoming messages
+	 */
+	public abstract void compute(Iterator<M> msgIterator);
 
-    /**
-     * Add an edge for the vertex.
-     * 
-     * @param targetVertexId
-     * @param edgeValue
-     * @return successful or not
-     */
-    public final boolean addEdge(I targetVertexId, E edgeValue) {
-        Edge<I, E> edge = this.allocateEdge();
-        edge.setDestVertexId(targetVertexId);
-        edge.setEdgeValue(edgeValue);
-        destEdgeList.add(edge);
-        return true;
-    }
+	/**
+	 * Add an edge for the vertex.
+	 * 
+	 * @param targetVertexId
+	 * @param edgeValue
+	 * @return successful or not
+	 */
+	public final boolean addEdge(I targetVertexId, E edgeValue) {
+		Edge<I, E> edge = this.allocateEdge();
+		edge.setDestVertexId(targetVertexId);
+		edge.setEdgeValue(edgeValue);
+		destEdgeList.add(edge);
+		updated = true;
+		return true;
+	}
 
-    /**
-     * Initialize a new vertex
-     * 
-     * @param vertexId
-     * @param vertexValue
-     * @param edges
-     * @param messages
-     */
-    public void initialize(I vertexId, V vertexValue, Map<I, E> edges, List<M> messages) {
-        if (vertexId != null) {
-            setVertexId(vertexId);
-        }
-        if (vertexValue != null) {
-            setVertexValue(vertexValue);
-        }
-        destEdgeList.clear();
-        if (edges != null && !edges.isEmpty()) {
-            for (Map.Entry<I, E> entry : edges.entrySet()) {
-                destEdgeList.add(new Edge<I, E>(entry.getKey(), entry.getValue()));
-            }
-        }
-        if (messages != null && !messages.isEmpty()) {
-            msgList.addAll(messages);
-        }
-    }
+	/**
+	 * Initialize a new vertex
+	 * 
+	 * @param vertexId
+	 * @param vertexValue
+	 * @param edges
+	 * @param messages
+	 */
+	public void initialize(I vertexId, V vertexValue, Map<I, E> edges,
+			List<M> messages) {
+		if (vertexId != null) {
+			setVertexId(vertexId);
+		}
+		if (vertexValue != null) {
+			setVertexValue(vertexValue);
+		}
+		destEdgeList.clear();
+		if (edges != null && !edges.isEmpty()) {
+			for (Map.Entry<I, E> entry : edges.entrySet()) {
+				destEdgeList.add(new Edge<I, E>(entry.getKey(), entry
+						.getValue()));
+			}
+		}
+		if (messages != null && !messages.isEmpty()) {
+			msgList.addAll(messages);
+		}
+	}
 
-    /**
-     * reset a vertex object: clear its internal states
-     */
-    public void reset() {
-        usedEdge = 0;
-        usedMessage = 0;
-        usedValue = 0;
-    }
+	/**
+	 * reset a vertex object: clear its internal states
+	 */
+	public void reset() {
+		usedEdge = 0;
+		usedMessage = 0;
+		usedValue = 0;
+		updated = false;
+	}
 
-    private Edge<I, E> allocateEdge() {
-        Edge<I, E> edge;
-        if (usedEdge < edgePool.size()) {
-            edge = edgePool.get(usedEdge);
-            usedEdge++;
-        } else {
-            edge = new Edge<I, E>();
-            edgePool.add(edge);
-            usedEdge++;
-        }
-        return edge;
-    }
+	/**
+	 * Set the vertex id
+	 * 
+	 * @param vertexId
+	 */
+	public final void setVertexId(I vertexId) {
+		this.vertexId = vertexId;
+		delegate.setVertexId(vertexId);
+	}
 
-    private M allocateMessage() {
-        M message;
-        if (usedMessage < msgPool.size()) {
-            message = msgPool.get(usedEdge);
-            usedMessage++;
-        } else {
-            message = BspUtils.<M> createMessageValue(getContext().getConfiguration());
-            msgPool.add(message);
-            usedMessage++;
-        }
-        return message;
-    }
+	/**
+	 * Get the vertex id
+	 * 
+	 * @return vertex id
+	 */
+	public final I getVertexId() {
+		return vertexId;
+	}
 
-    private V allocateValue() {
-        V value;
-        if (usedValue < valuePool.size()) {
-            value = valuePool.get(usedEdge);
-            usedValue++;
-        } else {
-            value = BspUtils.<V> createVertexValue(getContext().getConfiguration());
-            valuePool.add(value);
-            usedValue++;
-        }
-        return value;
-    }
+	/**
+	 * Get the vertex value
+	 * 
+	 * @return the vertex value
+	 */
+	public final V getVertexValue() {
+		return vertexValue;
+	}
 
-    /**
-     * Set the vertex id
-     * 
-     * @param vertexId
-     */
-    public final void setVertexId(I vertexId) {
-        this.vertexId = vertexId;
-        delegate.setVertexId(vertexId);
-    }
+	/**
+	 * Set the vertex value
+	 * 
+	 * @param vertexValue
+	 */
+	public final void setVertexValue(V vertexValue) {
+		this.vertexValue = vertexValue;
+		this.updated = true;
+	}
 
-    /**
-     * Get the vertex id
-     * 
-     * @return vertex id
-     */
-    public final I getVertexId() {
-        return vertexId;
-    }
+	/**
+	 * Send a message to a specific vertex
+	 * 
+	 * @param id
+	 *            the receiver vertex id
+	 * @param msg
+	 *            the message
+	 */
+	public final void sendMsg(I id, M msg) {
+		if (msg == null) {
+			throw new IllegalArgumentException(
+					"sendMsg: Cannot send null message to " + id);
+		}
+		delegate.sendMsg(id, msg);
+		this.hasMessage = true;
+	}
 
-    /**
-     * Set the global superstep for all the vertices (internal use)
-     * 
-     * @param superstep
-     *            New superstep
-     */
-    public static void setSuperstep(long superstep) {
-        Vertex.superstep = superstep;
-    }
+	/**
+	 * Send a message to all direct outgoing neighbors
+	 * 
+	 * @param msg
+	 *            the message
+	 */
+	public final void sendMsgToAllEdges(M msg) {
+		if (msg == null) {
+			throw new IllegalArgumentException(
+					"sendMsgToAllEdges: Cannot send null message to all edges");
+		}
+		for (Edge<I, E> edge : destEdgeList) {
+			sendMsg(edge.getDestVertexId(), msg);
+		}
+	}
 
-    public static long getCurrentSuperstep() {
-        return superstep;
-    }
+	/**
+	 * Vote to halt. Once all vertices have voted to halt and there are no
+	 * more messages in flight, a Pregelix job terminates.
+	 */
+	public final void voteToHalt() {
+		halt = true;
+		updated = true;
+	}
 
-    public final long getSuperstep() {
-        return superstep;
-    }
+	/**
+	 * @return the vertex is halted (true) or not (false)
+	 */
+	public final boolean isHalted() {
+		return halt;
+	}
 
-    public final V getVertexValue() {
-        return vertexValue;
-    }
+	@Override
+	final public void readFields(DataInput in) throws IOException {
+		reset();
+		if (vertexId == null)
+			vertexId = BspUtils.<I> createVertexIndex(getContext()
+					.getConfiguration());
+		vertexId.readFields(in);
+		delegate.setVertexId(vertexId);
+		boolean hasVertexValue = in.readBoolean();
 
-    public final void setVertexValue(V vertexValue) {
-        this.vertexValue = vertexValue;
-        this.updated = true;
-    }
+		if (hasVertexValue) {
+			vertexValue = allocateValue();
+			vertexValue.readFields(in);
+			delegate.setVertex(this);
+		}
+		destEdgeList.clear();
+		long edgeMapSize = SerDeUtils.readVLong(in);
+		for (long i = 0; i < edgeMapSize; ++i) {
+			Edge<I, E> edge = allocateEdge();
+			edge.setConf(getContext().getConfiguration());
+			edge.readFields(in);
+			addEdge(edge);
+		}
+		msgList.clear();
+		long msgListSize = SerDeUtils.readVLong(in);
+		for (long i = 0; i < msgListSize; ++i) {
+			M msg = allocateMessage();
+			msg.readFields(in);
+			msgList.add(msg);
+		}
+		halt = in.readBoolean();
+		updated = false;
+		hasMessage = false;
+		createdNewLiveVertex = false;
+	}
 
-    /**
-     * Set the total number of vertices from the last superstep.
-     * 
-     * @param numVertices
-     *            Aggregate vertices in the last superstep
-     */
-    public static void setNumVertices(long numVertices) {
-        Vertex.numVertices = numVertices;
-    }
+	@Override
+	public void write(DataOutput out) throws IOException {
+		vertexId.write(out);
+		out.writeBoolean(vertexValue != null);
+		if (vertexValue != null) {
+			vertexValue.write(out);
+		}
+		SerDeUtils.writeVLong(out, destEdgeList.size());
+		for (Edge<I, E> edge : destEdgeList) {
+			edge.write(out);
+		}
+		SerDeUtils.writeVLong(out, msgList.size());
+		for (M msg : msgList) {
+			msg.write(out);
+		}
+		out.writeBoolean(halt);
+	}
 
-    public final long getNumVertices() {
-        return numVertices;
-    }
+	/**
+	 * Get the list of incoming messages
+	 * 
+	 * @return the list of messages
+	 */
+	public List<M> getMsgList() {
+		return msgList;
+	}
 
-    /**
-     * Set the total number of edges from the last superstep.
-     * 
-     * @param numEdges
-     *            Aggregate edges in the last superstep
-     */
-    public static void setNumEdges(long numEdges) {
-        Vertex.numEdges = numEdges;
-    }
+	/**
+	 * Get outgoing edge list
+	 * 
+	 * @return a list of outgoing edges
+	 */
+	public List<Edge<I, E>> getEdges() {
+		return this.destEdgeList;
+	}
 
-    public final long getNumEdges() {
-        return numEdges;
-    }
+	@Override
+	@SuppressWarnings("unchecked")
+	public String toString() {
+		Collections.sort(destEdgeList);
+		StringBuffer edgeBuffer = new StringBuffer();
+		edgeBuffer.append("(");
+		for (Edge<I, E> edge : destEdgeList) {
+			edgeBuffer.append(edge.getDestVertexId()).append(",");
+		}
+		edgeBuffer.append(")");
+		return "Vertex(id=" + getVertexId() + ",value=" + getVertexValue()
+				+ ", edges=" + edgeBuffer + ")";
+	}
 
-    /***
-     * Send a message to a specific vertex
-     * 
-     * @param id
-     *            the receiver vertex id
-     * @param msg
-     *            the message
-     */
-    public final void sendMsg(I id, M msg) {
-        if (msg == null) {
-            throw new IllegalArgumentException("sendMsg: Cannot send null message to " + id);
-        }
-        delegate.sendMsg(id, msg);
-        this.hasMessage = true;
-    }
+	/**
+	 * Get the number of outgoing edges
+	 * 
+	 * @return the number of outgoing edges
+	 */
+	public int getNumOutEdges() {
+		return destEdgeList.size();
+	}
 
-    /**
-     * Send a message to all direct outgoing neighbors
-     * 
-     * @param msg
-     *            the message
-     */
-    public final void sendMsgToAllEdges(M msg) {
-        if (msg == null) {
-            throw new IllegalArgumentException("sendMsgToAllEdges: Cannot send null message to all edges");
-        }
-        for (Edge<I, E> edge : destEdgeList) {
-            sendMsg(edge.getDestVertexId(), msg);
-        }
-    }
+	/**
+	 * Pregelix internal use only
+	 * 
+	 * @param writers
+	 */
+	public void setOutputWriters(List<IFrameWriter> writers) {
+		delegate.setOutputWriters(writers);
+	}
 
-    /**
-     * Vote to halt. Once all vertex vote to halt and no more messages, a
-     * Pregelix job will terminate.
-     */
-    public final void voteToHalt() {
-        halt = true;
-    }
+	/**
+	 * Pregelix internal use only
+	 * 
+	 * @param appenders
+	 */
+	public void setOutputAppenders(List<FrameTupleAppender> appenders) {
+		delegate.setOutputAppenders(appenders);
+	}
 
-    /**
-     * @return the vertex is halted (true) or not (false)
-     */
-    public final boolean isHalted() {
-        return halt;
-    }
+	/**
+	 * Pregelix internal use only
+	 * 
+	 * @param tbs
+	 */
+	public void setOutputTupleBuilders(List<ArrayTupleBuilder> tbs) {
+		delegate.setOutputTupleBuilders(tbs);
+	}
 
-    @Override
-    final public void readFields(DataInput in) throws IOException {
-        reset();
-        if (vertexId == null)
-            vertexId = BspUtils.<I> createVertexIndex(getContext().getConfiguration());
-        vertexId.readFields(in);
-        delegate.setVertexId(vertexId);
-        boolean hasVertexValue = in.readBoolean();
-        if (hasVertexValue) {
-            vertexValue = allocateValue();
-            vertexValue.readFields(in);
-            delegate.setVertex(this);
-        }
-        destEdgeList.clear();
-        long edgeMapSize = SerDeUtils.readVLong(in);
-        for (long i = 0; i < edgeMapSize; ++i) {
-            Edge<I, E> edge = allocateEdge();
-            edge.setConf(getContext().getConfiguration());
-            edge.readFields(in);
-            addEdge(edge);
-        }
-        msgList.clear();
-        long msgListSize = SerDeUtils.readVLong(in);
-        for (long i = 0; i < msgListSize; ++i) {
-            M msg = allocateMessage();
-            msg.readFields(in);
-            msgList.add(msg);
-        }
-        halt = in.readBoolean();
-        updated = false;
-        hasMessage = false;
-    }
+	/**
+	 * Pregelix internal use only
+	 * 
+	 * @throws IOException
+	 */
+	public void finishCompute() throws IOException {
+		delegate.finishCompute();
+	}
 
-    @Override
-    public void write(DataOutput out) throws IOException {
-        vertexId.write(out);
-        out.writeBoolean(vertexValue != null);
-        if (vertexValue != null) {
-            vertexValue.write(out);
-        }
-        SerDeUtils.writeVLong(out, destEdgeList.size());
-        for (Edge<I, E> edge : destEdgeList) {
-            edge.write(out);
-        }
-        SerDeUtils.writeVLong(out, msgList.size());
-        for (M msg : msgList) {
-            msg.write(out);
-        }
-        out.writeBoolean(halt);
-    }
+	/**
+	 * Pregelix internal use only
+	 */
+	public boolean hasUpdate() {
+		return this.updated;
+	}
 
-    private boolean addEdge(Edge<I, E> edge) {
-        edge.setConf(getContext().getConfiguration());
-        destEdgeList.add(edge);
-        return true;
-    }
+	/**
+	 * Pregelix internal use only
+	 */
+	public boolean hasMessage() {
+		return this.hasMessage;
+	}
 
-    /**
-     * Get the list of incoming messages
-     * 
-     * @return the list of messages
-     */
-    public List<M> getMsgList() {
-        return msgList;
-    }
+	/**
+	 * Pregelix internal use only
+	 */
+	public boolean createdNewLiveVertex() {
+		return this.createdNewLiveVertex;
+	}
 
-    /**
-     * Get outgoing edge list
-     * 
-     * @return a list of outgoing edges
-     */
-    public List<Edge<I, E>> getEdges() {
-        return this.destEdgeList;
-    }
+	/**
+	 * sort the edges
+	 */
+	@SuppressWarnings("unchecked")
+	public void sortEdges() {
+		updated = true;
+		Collections.sort(destEdgeList);
+	}
 
-    public final Mapper<?, ?, ?, ?>.Context getContext() {
-        return context;
-    }
+	/**
+	 * Allocate a new edge from the edge pool
+	 */
+	private Edge<I, E> allocateEdge() {
+		Edge<I, E> edge;
+		if (usedEdge < edgePool.size()) {
+			edge = edgePool.get(usedEdge);
+			usedEdge++;
+		} else {
+			edge = new Edge<I, E>();
+			edgePool.add(edge);
+			usedEdge++;
+		}
+		return edge;
+	}
 
-    public final static void setContext(Mapper<?, ?, ?, ?>.Context context) {
-        Vertex.context = context;
-    }
+	/**
+	 * Allocate a new message from the message pool
+	 */
+	private M allocateMessage() {
+		M message;
+		if (usedMessage < msgPool.size()) {
+			message = msgPool.get(usedMessage);
+			usedMessage++;
+		} else {
+			message = BspUtils.<M> createMessageValue(getContext()
+					.getConfiguration());
+			msgPool.add(message);
+			usedMessage++;
+		}
+		return message;
+	}
 
-    @SuppressWarnings("unchecked")
-    public String toString() {
-        Collections.sort(destEdgeList);
-        StringBuffer edgeBuffer = new StringBuffer();
-        edgeBuffer.append("(");
-        for (Edge<I, E> edge : destEdgeList) {
-            edgeBuffer.append(edge.getDestVertexId()).append(",");
-        }
-        edgeBuffer.append(")");
-        return "Vertex(id=" + getVertexId() + ",value=" + getVertexValue() + ", edges=" + edgeBuffer + ")";
-    }
+	/**
+	 * Set the global superstep for all the vertices (internal use)
+	 * 
+	 * @param superstep
+	 *            New superstep
+	 */
+	public static final void setSuperstep(long superstep) {
+		Vertex.superstep = superstep;
+	}
 
-    public void setOutputWriters(List<IFrameWriter> writers) {
-        delegate.setOutputWriters(writers);
-    }
+	/**
+	 * Add an outgoing edge into the vertex
+	 * 
+	 * @param edge
+	 *            the edge to be added
+	 * @return true if the edge list changed as a result of this call
+	 */
+	public boolean addEdge(Edge<I, E> edge) {
+		edge.setConf(getContext().getConfiguration());
+		updated = true;
+		return destEdgeList.add(edge);
+	}
 
-    public void setOutputAppenders(List<FrameTupleAppender> appenders) {
-        delegate.setOutputAppenders(appenders);
-    }
+	/**
+	 * Remove an outgoing edge from the vertex
+	 * 
+	 * @param edge
+	 *            the edge to be removed
+	 * @return true if the edge was in the edge list of the vertex
+	 */
+	public boolean removeEdge(Edge<I, E> edge) {
+		updated = true;
+		return destEdgeList.remove(edge);
+	}
 
-    public void setOutputTupleBuilders(List<ArrayTupleBuilder> tbs) {
-        delegate.setOutputTupleBuilders(tbs);
-    }
+	/**
+	 * Add a new vertex into the graph
+	 * 
+	 * @param vertexId
+	 *            the vertex id
+	 * @param vertex
+	 *            the vertex
+	 */
+	public final void addVertex(I vertexId, Vertex vertex) {
+		createdNewLiveVertex |= !vertex.isHalted();
+		delegate.addVertex(vertexId, vertex);
+	}
 
-    public void finishCompute() throws IOException {
-        delegate.finishCompute();
-    }
+	/**
+	 * Delete a vertex from id
+	 * 
+	 * @param vertexId
+	 *            the vertex id
+	 */
+	public final void deleteVertex(I vertexId) {
+		delegate.deleteVertex(vertexId);
+	}
 
-    public boolean hasUpdate() {
-        return this.updated;
-    }
+	/**
+	 * Allocate a vertex value from the object pool
+	 * 
+	 * @return a vertex value instance
+	 */
+	private V allocateValue() {
+		V value;
+		if (usedValue < valuePool.size()) {
+			value = valuePool.get(usedValue);
+			usedValue++;
+		} else {
+			value = BspUtils.<V> createVertexValue(getContext()
+					.getConfiguration());
+			valuePool.add(value);
+			usedValue++;
+		}
+		return value;
+	}
 
-    public boolean hasMessage() {
-        return this.hasMessage;
-    }
+	/**
+	 * Get the current global superstep number
+	 * 
+	 * @return the current superstep number
+	 */
+	public static final long getSuperstep() {
+		return superstep;
+	}
 
-    public int getNumOutEdges() {
-        return destEdgeList.size();
-    }
+	/**
+	 * Set the total number of vertices from the last superstep.
+	 * 
+	 * @param numVertices
+	 *            Aggregate vertices in the last superstep
+	 */
+	public static final void setNumVertices(long numVertices) {
+		Vertex.numVertices = numVertices;
+	}
 
-    @SuppressWarnings("unchecked")
-    public void sortEdges() {
-        Collections.sort((List) destEdgeList);
-    }
+	/**
+	 * Get the number of vertices in the graph
+	 * 
+	 * @return the number of vertices in the graph
+	 */
+	public static final long getNumVertices() {
+		return numVertices;
+	}
+
+	/**
+	 * Set the total number of edges from the last superstep.
+	 * 
+	 * @param numEdges
+	 *            Aggregate edges in the last superstep
+	 */
+	public static void setNumEdges(long numEdges) {
+		Vertex.numEdges = numEdges;
+	}
+
+	/**
+	 * Get the number of edges in the graph
+	 * 
+	 * @return the number of edges in the graph
+	 */
+	public static final long getNumEdges() {
+		return numEdges;
+	}
+
+	/**
+	 * Pregelix internal use only
+	 */
+	public static final TaskAttemptContext getContext() {
+		return context;
+	}
+
+	/**
+	 * Pregelix internal use only
+	 * 
+	 * @param context
+	 */
+	public static final void setContext(TaskAttemptContext context) {
+		Vertex.context = context;
+	}
 
 }
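
The reindented Vertex above keeps three cursor-tracked object pools (edges, messages, values) so readFields can deserialize vertex after vertex without reallocating; reset() merely rewinds the cursors. A generic sketch of that pool, under the assumption that pooled objects are safely reusable once the cursor is rewound; it also shows why each pool must be indexed by its own cursor, which is what the usedMessage index in allocateMessage above ensures:

import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;

// Generic sketch of the index-tracked object pool Vertex uses: reset()
// rewinds the cursor instead of freeing objects, so deserializing the
// next vertex reuses earlier allocations.
final class PoolSketch<T> {
    private final List<T> pool = new ArrayList<>();
    private final Supplier<T> factory;
    private int used = 0;

    PoolSketch(Supplier<T> factory) { this.factory = factory; }

    T allocate() {
        if (used < pool.size()) {
            return pool.get(used++);   // reuse a pooled instance
        }
        T fresh = factory.get();       // grow the pool on demand
        pool.add(fresh);
        used++;
        return fresh;
    }

    void reset() { used = 0; }         // what Vertex.reset() does per pool

    public static void main(String[] args) {
        PoolSketch<StringBuilder> pool = new PoolSketch<>(StringBuilder::new);
        StringBuilder first = pool.allocate();
        pool.reset();
        // After reset, the same instance comes back out.
        System.out.println(first == pool.allocate());   // true
    }
}
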
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/VertexDelegate.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/VertexDelegate.java
index 7267f30..4b153c1 100644
--- a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/VertexDelegate.java
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/VertexDelegate.java
@@ -44,6 +44,16 @@
     private IFrameWriter aliveWriter;
     private FrameTupleAppender appenderAlive;
 
+    /** the tuple for insert */
+    private ArrayTupleBuilder insertTb;
+    private IFrameWriter insertWriter;
+    private FrameTupleAppender appenderInsert;
+
+    /** the tuple for delete */
+    private ArrayTupleBuilder deleteTb;
+    private IFrameWriter deleteWriter;
+    private FrameTupleAppender appenderDelete;
+
     /** message list */
     private MsgList dummyMessageList = new MsgList();
     /** whether alive message should be pushed out */
@@ -95,25 +105,70 @@
         this.vertexId = vertexId;
     }
 
+    public final void addVertex(I vertexId, Vertex vertex) {
+        try {
+            insertTb.reset();
+            DataOutput outputInsert = insertTb.getDataOutput();
+            vertexId.write(outputInsert);
+            insertTb.addFieldEndOffset();
+            vertex.write(outputInsert);
+            insertTb.addFieldEndOffset();
+            FrameTupleUtils.flushTuple(appenderInsert, insertTb, insertWriter);
+
+            /**
+             * push alive when necessary
+             */
+            if (pushAlive && !vertex.isHalted()) {
+                alive.reset();
+                DataOutput outputAlive = alive.getDataOutput();
+                vertexId.write(outputAlive);
+                alive.addFieldEndOffset();
+                dummyMessageList.write(outputAlive);
+                alive.addFieldEndOffset();
+                FrameTupleUtils.flushTuple(appenderAlive, alive, aliveWriter);
+            }
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    public final void deleteVertex(I vertexId) {
+        try {
+            deleteTb.reset();
+            DataOutput outputDelete = deleteTb.getDataOutput();
+            vertexId.write(outputDelete);
+            deleteTb.addFieldEndOffset();
+            FrameTupleUtils.flushTuple(appenderDelete, deleteTb, deleteWriter);
+        } catch (Exception e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
     public final void setOutputWriters(List<IFrameWriter> outputs) {
         msgWriter = outputs.get(0);
-        if (outputs.size() > 1) {
-            aliveWriter = outputs.get(1);
+        insertWriter = outputs.get(1);
+        deleteWriter = outputs.get(2);
+        if (outputs.size() > 3) {
+            aliveWriter = outputs.get(outputs.size() - 1);
             pushAlive = true;
         }
     }
 
     public final void setOutputAppenders(List<FrameTupleAppender> appenders) {
         appenderMsg = appenders.get(0);
-        if (appenders.size() > 1) {
-            appenderAlive = appenders.get(1);
+        appenderInsert = appenders.get(1);
+        appenderDelete = appenders.get(2);
+        if (appenders.size() > 3) {
+            appenderAlive = appenders.get(appenders.size() - 1);
         }
     }
 
     public final void setOutputTupleBuilders(List<ArrayTupleBuilder> tbs) {
         message = tbs.get(0);
-        if (tbs.size() > 1) {
-            alive = tbs.get(1);
+        insertTb = tbs.get(1);
+        deleteTb = tbs.get(2);
+        if (tbs.size() > 3) {
+            alive = tbs.get(tbs.size() - 1);
         }
     }
 }
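
The delegate's setOutput* methods now encode a positional contract: slot 0 carries messages, slot 1 insertions, slot 2 deletions, and the optional alive channel rides last. A hedged sketch of that wiring, with strings standing in for IFrameWriter/FrameTupleAppender/ArrayTupleBuilder:

import java.util.Arrays;
import java.util.List;

// Sketch of the positional contract the new setOutput* methods assume.
public class OutputWiringSketch {
    public static void main(String[] args) {
        List<String> outputs = Arrays.asList("msg", "insert", "delete", "alive");

        String msgWriter = outputs.get(0);
        String insertWriter = outputs.get(1);
        String deleteWriter = outputs.get(2);
        boolean pushAlive = outputs.size() > 3;       // alive is optional
        String aliveWriter = pushAlive ? outputs.get(outputs.size() - 1) : null;

        System.out.println(msgWriter + " " + insertWriter + " "
                + deleteWriter + " alive=" + aliveWriter);
    }
}
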
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/BasicGenInputSplit.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/BasicGenInputSplit.java
index 7179737..ea33691 100644
--- a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/BasicGenInputSplit.java
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/io/BasicGenInputSplit.java
@@ -21,60 +21,64 @@
 import java.io.Serializable;
 
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 
 /**
  * This InputSplit will not give any ordering or location data. It is used
  * internally by BspInputFormat (which determines how many tasks to run the
  * application on). Users should not use this directly.
  */
-public class BasicGenInputSplit extends InputSplit implements Writable, Serializable {
-    private static final long serialVersionUID = 1L;
-    /** Number of splits */
-    private int numSplits = -1;
-    /** Split index */
-    private int splitIndex = -1;
+public class BasicGenInputSplit extends FileSplit implements Writable,
+		Serializable {
+	private static final long serialVersionUID = 1L;
+	/** Number of splits */
+	private int numSplits = -1;
+	/** Split index */
+	private int splitIndex = -1;
 
-    public BasicGenInputSplit() {
-    }
+	public BasicGenInputSplit() {
+		super(null, 0, 0, null);
+	}
 
-    public BasicGenInputSplit(int splitIndex, int numSplits) {
-        this.splitIndex = splitIndex;
-        this.numSplits = numSplits;
-    }
+	public BasicGenInputSplit(int splitIndex, int numSplits) {
+		super(null, 0, 0, null);
+		this.splitIndex = splitIndex;
+		this.numSplits = numSplits;
+	}
 
-    @Override
-    public long getLength() throws IOException, InterruptedException {
-        return 0;
-    }
+	@Override
+	public long getLength() {
+		return 0;
+	}
 
-    @Override
-    public String[] getLocations() throws IOException, InterruptedException {
-        return new String[] {};
-    }
+	@Override
+	public String[] getLocations() throws IOException {
+		return new String[] {};
+	}
 
-    @Override
-    public void readFields(DataInput in) throws IOException {
-        splitIndex = in.readInt();
-        numSplits = in.readInt();
-    }
+	@Override
+	public void readFields(DataInput in) throws IOException {
+		splitIndex = in.readInt();
+		numSplits = in.readInt();
+	}
 
-    @Override
-    public void write(DataOutput out) throws IOException {
-        out.writeInt(splitIndex);
-        out.writeInt(numSplits);
-    }
+	@Override
+	public void write(DataOutput out) throws IOException {
+		out.writeInt(splitIndex);
+		out.writeInt(numSplits);
+	}
 
-    public int getSplitIndex() {
-        return splitIndex;
-    }
+	public int getSplitIndex() {
+		return splitIndex;
+	}
 
-    public int getNumSplits() {
-        return numSplits;
-    }
+	public int getNumSplits() {
+		return numSplits;
+	}
 
-    @Override
-    public String toString() {
-        return "'" + getClass().getCanonicalName() + ", index=" + getSplitIndex() + ", num=" + getNumSplits();
-    }
+	@Override
+	public String toString() {
+		return getClass().getCanonicalName() + ", index="
+				+ getSplitIndex() + ", num=" + getNumSplits();
+	}
 }
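
Despite now extending FileSplit, BasicGenInputSplit still serializes only its two integers; the parent's path/offset state stays local. A sketch of that Writable round trip with plain streams:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Sketch of the round trip between write(DataOutput) and
// readFields(DataInput): only splitIndex and numSplits cross the wire.
public class SplitRoundTripSketch {
    public static void main(String[] args) throws IOException {
        int splitIndex = 3, numSplits = 8;

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bos);
        out.writeInt(splitIndex);   // mirrors write(DataOutput)
        out.writeInt(numSplits);

        DataInputStream in = new DataInputStream(
                new ByteArrayInputStream(bos.toByteArray()));
        System.out.println("index=" + in.readInt()   // mirrors readFields
                + " num=" + in.readInt());
    }
}
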
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/job/PregelixJob.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/job/PregelixJob.java
index 6ef7e13..8b6d1b6 100644
--- a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/job/PregelixJob.java
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/job/PregelixJob.java
@@ -60,8 +60,12 @@
     public static final String NUM_VERTICE = "pregelix.numVertices";
     /** num of edges */
     public static final String NUM_EDGES = "pregelix.numEdges";
+    /** increase state length */
+    public static final String INCREASE_STATE_LENGTH = "pregelix.incStateLength";
     /** job id */
     public static final String JOB_ID = "pregelix.jobid";
+    /** frame size */
+    public static final String FRAME_SIZE = "pregelix.framesize";
 
     /**
      * Constructor that will instantiate the configuration
@@ -130,8 +134,8 @@
     /**
      * Set the global aggregator class (optional)
      * 
-     * @param vertexCombinerClass
-     *            Determines how vertex messages are combined
+     * @param globalAggregatorClass
+     *            Determines how messages are globally aggregated
      */
     final public void setGlobalAggregatorClass(Class<?> globalAggregatorClass) {
         getConfiguration().setClass(GLOBAL_AGGREGATOR_CLASS, globalAggregatorClass, GlobalAggregator.class);
@@ -139,11 +143,27 @@
 
     /**
      * Set the job Id
-     * 
-     * @param vertexCombinerClass
-     *            Determines how vertex messages are combined
      */
     final public void setJobId(String jobId) {
         getConfiguration().set(JOB_ID, jobId);
     }
+
+    /**
+     * Set whether the vertex state length can be dynamically increased
+     * 
+     * @param incStateLengthDynamically
+     */
+    final public void setDynamicVertexValueSize(boolean incStateLengthDynamically) {
+        getConfiguration().setBoolean(INCREASE_STATE_LENGTH, incStateLengthDynamically);
+    }
+
+    /**
+     * Set the frame size for a job
+     * 
+     * @param frameSize
+     *            the desired frame size
+     */
+    final public void setFrameSize(int frameSize) {
+        getConfiguration().setInt(FRAME_SIZE, frameSize);
+    }
 }
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/BspUtils.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/BspUtils.java
index 7c4853f..ff9724d 100644
--- a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/BspUtils.java
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/BspUtils.java
@@ -410,4 +410,26 @@
             throw new IllegalArgumentException("createMessageValue: Illegally accessed", e);
         }
     }
+
+    /**
+     * Get the job configuration parameter indicating whether vertex state sizes may grow dynamically
+     * 
+     * @param conf
+     *            the job configuration
+     * @return the boolean setting of the parameter, by default it is false
+     */
+    public static boolean getDynamicVertexValueSize(Configuration conf) {
+        return conf.getBoolean(PregelixJob.INCREASE_STATE_LENGTH, false);
+    }
+
+    /**
+     * Get the specified frame size
+     * 
+     * @param conf
+     *            the job configuration
+     * @return the specified frame size; -1 if it is not set by users
+     */
+    public static int getFrameSize(Configuration conf) {
+        return conf.getInt(PregelixJob.FRAME_SIZE, -1);
+    }
 }
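
Together with the PregelixJob setters above, these getters form a plain key/value round trip through the Hadoop Configuration, with false and -1 as the unset defaults. A sketch of the lookup logic, with a Map standing in for Configuration; the key strings match the constants added to PregelixJob:

import java.util.HashMap;
import java.util.Map;

// Sketch of the default-driven lookups the two new BspUtils getters perform.
public class ConfigDefaultsSketch {
    static int getFrameSize(Map<String, String> conf) {
        // conf.getInt(PregelixJob.FRAME_SIZE, -1): -1 signals "unset"
        String v = conf.get("pregelix.framesize");
        return v == null ? -1 : Integer.parseInt(v);
    }

    static boolean getDynamicVertexValueSize(Map<String, String> conf) {
        // conf.getBoolean(PregelixJob.INCREASE_STATE_LENGTH, false)
        String v = conf.get("pregelix.incStateLength");
        return v != null && Boolean.parseBoolean(v);
    }

    public static void main(String[] args) {
        Map<String, String> conf = new HashMap<>();
        System.out.println(getFrameSize(conf));               // -1 (unset)
        conf.put("pregelix.framesize", "65536");              // setFrameSize(64 * 1024)
        System.out.println(getFrameSize(conf));               // 65536
        System.out.println(getDynamicVertexValueSize(conf));  // false
    }
}
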
diff --git a/pregelix/pregelix-core/pom.xml b/pregelix/pregelix-core/pom.xml
index 17ea6c3..5238068 100644
--- a/pregelix/pregelix-core/pom.xml
+++ b/pregelix/pregelix-core/pom.xml
@@ -1,4 +1,5 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
 	<artifactId>pregelix-core</artifactId>
 	<packaging>jar</packaging>
@@ -19,6 +20,7 @@
 		<plugins>
 			<plugin>
 				<artifactId>maven-jar-plugin</artifactId>
+				<version>2.4</version>
 				<executions>
 					<execution>
 						<id>balancer</id>
@@ -38,11 +40,6 @@
 							</includes>
 						</configuration>
 					</execution>
-				</executions>
-			</plugin>
-			<plugin>
-				<artifactId>maven-jar-plugin</artifactId>
-				<executions>
 					<execution>
 						<id>generator</id>
 						<goals>
@@ -64,36 +61,19 @@
 				</executions>
 			</plugin>
 			<plugin>
-				<artifactId>maven-jar-plugin</artifactId>
-				<executions>
-					<execution>
-						<id>patch</id>
-						<goals>
-							<goal>jar</goal>
-						</goals>
-						<phase>package</phase>
-						<configuration>
-							<classifier>patch</classifier>
-							<finalName>a-hadoop</finalName>
-							<includes>
-								<include>**/org/apache/**</include>
-							</includes>
-						</configuration>
-					</execution>
-				</executions>
-			</plugin>
-			<plugin>
 				<groupId>org.apache.maven.plugins</groupId>
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
+					<fork>true</fork>
 				</configuration>
 			</plugin>
 			<plugin>
 				<groupId>org.codehaus.mojo</groupId>
 				<artifactId>appassembler-maven-plugin</artifactId>
+				<version>1.3</version>
 				<executions>
 					<execution>
 						<configuration>
@@ -166,25 +146,6 @@
 							</resources>
 						</configuration>
 					</execution>
-					<execution>
-						<id>copy-hadoop-patch</id>
-						<!-- here the phase you need -->
-						<phase>package</phase>
-						<goals>
-							<goal>copy-resources</goal>
-						</goals>
-						<configuration>
-							<outputDirectory>target/appassembler/lib</outputDirectory>
-							<resources>
-								<resource>
-									<directory>target</directory>
-									<includes>
-										<include>a-hadoop-patch.jar</include>
-									</includes>
-								</resource>
-							</resources>
-						</configuration>
-					</execution>
 				</executions>
 			</plugin>
 			<plugin>
@@ -193,7 +154,8 @@
 				<version>2.7.2</version>
 				<configuration>
 					<forkMode>pertest</forkMode>
-					<argLine>-enableassertions -Xmx512m -XX:MaxPermSize=300m -Dfile.encoding=UTF-8
+					<argLine>-enableassertions -Xmx512m -XX:MaxPermSize=300m
+						-Dfile.encoding=UTF-8
 						-Djava.util.logging.config.file=src/test/resources/logging.properties</argLine>
 					<includes>
 						<include>**/*TestSuite.java</include>
@@ -203,6 +165,7 @@
 			</plugin>
 			<plugin>
 				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
 				<configuration>
 					<filesets>
 						<fileset>
@@ -282,11 +245,6 @@
 			<groupId>edu.uci.ics.hyracks</groupId>
 			<artifactId>hyracks-data-std</artifactId>
 			<version>0.2.3-SNAPSHOT</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.hadoop</groupId>
-			<artifactId>hadoop-core</artifactId>
-			<version>0.20.2</version>
 			<type>jar</type>
 			<scope>compile</scope>
 		</dependency>
@@ -319,13 +277,6 @@
 			<scope>compile</scope>
 		</dependency>
 		<dependency>
-			<groupId>org.apache.hadoop</groupId>
-			<artifactId>hadoop-test</artifactId>
-			<version>0.20.2</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
 			<groupId>com.kenai.nbpwr</groupId>
 			<artifactId>org-apache-commons-io</artifactId>
 			<version>1.3.1-201002241208</version>
@@ -335,7 +286,7 @@
 		<dependency>
 			<groupId>edu.uci.ics.hyracks.examples</groupId>
 			<artifactId>hyracks-integration-tests</artifactId>
-			<version>0.2.1</version>
+			<version>0.2.3-SNAPSHOT</version>
 			<scope>test</scope>
 		</dependency>
 		<dependency>
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/driver/Driver.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/driver/Driver.java
index 1b6f195..3a4c41b 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/driver/Driver.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/driver/Driver.java
@@ -72,22 +72,29 @@
     public void runJob(PregelixJob job, Plan planChoice, String ipAddress, int port, boolean profiling)
             throws HyracksException {
         applicationName = exampleClass.getSimpleName() + UUID.randomUUID();
-        /** add hadoop configurations */
-        URL hadoopCore = job.getClass().getClassLoader().getResource("core-site.xml");
-        job.getConfiguration().addResource(hadoopCore);
-        URL hadoopMapRed = job.getClass().getClassLoader().getResource("mapred-site.xml");
-        job.getConfiguration().addResource(hadoopMapRed);
-        URL hadoopHdfs = job.getClass().getClassLoader().getResource("hdfs-site.xml");
-        job.getConfiguration().addResource(hadoopHdfs);
-        ClusterConfig.loadClusterConfig(ipAddress, port);
-
-        LOG.info("job started");
-        long start = System.currentTimeMillis();
-        long end = start;
-        long time = 0;
-
-        this.profiling = profiling;
         try {
+            /** add hadoop configurations */
+            URL hadoopCore = job.getClass().getClassLoader().getResource("core-site.xml");
+            if (hadoopCore != null) {
+                job.getConfiguration().addResource(hadoopCore);
+            }
+            URL hadoopMapRed = job.getClass().getClassLoader().getResource("mapred-site.xml");
+            if (hadoopMapRed != null) {
+                job.getConfiguration().addResource(hadoopMapRed);
+            }
+            URL hadoopHdfs = job.getClass().getClassLoader().getResource("hdfs-site.xml");
+            if (hadoopHdfs != null) {
+                job.getConfiguration().addResource(hadoopHdfs);
+            }
+            ClusterConfig.loadClusterConfig(ipAddress, port);
+
+            LOG.info("job started");
+            long start = System.currentTimeMillis();
+            long end = start;
+            long time = 0;
+
+            this.profiling = profiling;
+
             switch (planChoice) {
                 case INNER_JOIN:
                     jobGen = new JobGenInnerJoin(job);
@@ -146,6 +153,16 @@
             LOG.info("result writing finished " + time + "ms");
             LOG.info("job finished");
         } catch (Exception e) {
+            try {
+                /**
+                 * destroy application if there is any exception
+                 */
+                if (hcc != null) {
+                    destroyApplication(applicationName);
+                }
+            } catch (Exception e2) {
+                throw new HyracksException(e2);
+            }
             throw new HyracksException(e);
         }
     }
@@ -224,8 +241,8 @@
         LOG.info("jar deployment finished " + (end - start) + "ms");
     }
 
-    public void destroyApplication(String jarFile) throws Exception {
-        hcc.destroyApplication(applicationName);
+    public void destroyApplication(String appName) throws Exception {
+        hcc.destroyApplication(appName);
     }
 
 }
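
The reshuffled runJob above guards each addResource call because ClassLoader.getResource returns null when a site file is absent from the classpath, and a null resource can surface as a failure only later, when the Configuration is actually loaded. A standalone sketch of the guard; the file names match the ones the Driver probes:

import java.net.URL;

// Sketch of the null guard applied before Configuration.addResource().
public class GuardedResourceSketch {
    public static void main(String[] args) {
        for (String name : new String[] { "core-site.xml",
                "mapred-site.xml", "hdfs-site.xml" }) {
            URL url = GuardedResourceSketch.class.getClassLoader()
                    .getResource(name);
            if (url != null) {                 // only add what exists
                System.out.println("would addResource(" + url + ")");
            } else {
                System.out.println(name + " not on classpath, skipped");
            }
        }
    }
}
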
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGen.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGen.java
index de29dbc..0b1be61 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGen.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGen.java
@@ -76,8 +76,8 @@
 import edu.uci.ics.pregelix.core.util.DatatypeHelper;
 import edu.uci.ics.pregelix.dataflow.HDFSFileWriteOperatorDescriptor;
 import edu.uci.ics.pregelix.dataflow.VertexFileScanOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.VertexWriteOperatorDescriptor;
 import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
-import edu.uci.ics.pregelix.dataflow.std.FileWriteOperatorDescriptor;
 import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
 import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
 import edu.uci.ics.pregelix.runtime.bootstrap.StorageManagerInterface;
@@ -89,8 +89,6 @@
     private static final Logger LOGGER = Logger.getLogger(JobGen.class.getName());
     protected static final int MB = 1048576;
     protected static final float DEFAULT_BTREE_FILL_FACTOR = 1.00f;
-    protected static final int frameSize = ClusterConfig.getFrameSize();
-    protected static final int maxFrameSize = (int) (((long) 32 * MB) / frameSize);
     protected static final int tableSize = 10485767;
     protected static final String PRIMARY_INDEX = "primary";
     protected final Configuration conf;
@@ -98,6 +96,8 @@
     protected IIndexRegistryProvider<IIndex> treeRegistryProvider = TreeIndexRegistryProvider.INSTANCE;
     protected IStorageManagerInterface storageManagerInterface = StorageManagerInterface.INSTANCE;
     protected String jobId = new UUID(System.currentTimeMillis(), System.nanoTime()).toString();
+    protected int frameSize = ClusterConfig.getFrameSize();
+    protected int maxFrameNumber = (int) (((long) 32 * MB) / frameSize);
 
     protected static final String SECONDARY_INDEX_ODD = "secondary1";
     protected static final String SECONDARY_INDEX_EVEN = "secondary2";
@@ -107,6 +107,17 @@
         this.giraphJob = job;
         this.initJobConfiguration();
         job.setJobId(jobId);
+
+        // Use the user-specified frame size if one was provided; otherwise keep
+        // the cluster-wide default read from ClusterConfig above.
+        int specifiedFrameSize = BspUtils.getFrameSize(job.getConfiguration());
+        if (specifiedFrameSize > 0) {
+            frameSize = specifiedFrameSize;
+            maxFrameNumber = (int) (((long) 32 * MB) / frameSize);
+        }
+        if (maxFrameNumber <= 0) {
+            maxFrameNumber = 1;
+        }
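+        // The in-memory sort budget is a fixed 32MB independent of the frame size:
+        // frameSize = 64KB gives maxFrameNumber = 512, while frameSize = 64MB
+        // truncates 32MB/64MB to 0 and is clamped to 1 above. A minimal sketch of
+        // overriding the frame size through the job configuration (the actual key
+        // name lives in BspUtils and is an assumption here):
+        //   job.getConfiguration().setInt("pregelix.framesize", 64 * 1024);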
     }
 
     @SuppressWarnings({ "rawtypes", "unchecked" })
@@ -189,9 +200,10 @@
         RecordDescriptor recordDescriptor = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
                 vertexIdClass.getName(), vertexClass.getName());
         IConfigurationFactory confFactory = new ConfigurationFactory(conf);
+        String[] readSchedule = ClusterConfig.getHdfsScheduler().getLocationConstraints(splits);
         VertexFileScanOperatorDescriptor scanner = new VertexFileScanOperatorDescriptor(spec, recordDescriptor, splits,
-                confFactory);
-        ClusterConfig.setLocationConstraint(spec, scanner, splits);
+                readSchedule, confFactory);
+        ClusterConfig.setLocationConstraint(spec, scanner);
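+        // readSchedule presumably holds one node-controller name per input split,
+        // computed by the HDFS scheduler from block locations (an assumption based
+        // on getLocationConstraints(splits)), so each split is read on a node that
+        // hosts its data.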
 
         /**
          * construct sort operator
@@ -203,7 +215,7 @@
         IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
         comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
                 .getClass());
-        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, maxFrameSize, sortFields,
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, sortFields,
                 nkmFactory, comparatorFactories, recordDescriptor);
         ClusterConfig.setLocationConstraint(spec, sorter);
 
@@ -264,9 +276,10 @@
         RecordDescriptor recordDescriptor = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
                 vertexIdClass.getName(), vertexClass.getName());
         IConfigurationFactory confFactory = new ConfigurationFactory(conf);
+        String[] readSchedule = ClusterConfig.getHdfsScheduler().getLocationConstraints(splits);
         VertexFileScanOperatorDescriptor scanner = new VertexFileScanOperatorDescriptor(spec, recordDescriptor, splits,
-                confFactory);
-        PartitionConstraintHelper.addPartitionCountConstraint(spec, scanner, splits.size());
+                readSchedule, confFactory);
+        ClusterConfig.setLocationConstraint(spec, scanner);
 
         /**
          * construct sort operator
@@ -280,7 +293,7 @@
                 .getClass());
         ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, maxFrameLimit, sortFields,
                 nkmFactory, comparatorFactories, recordDescriptor);
-        PartitionConstraintHelper.addPartitionCountConstraint(spec, sorter, splits.size());
+        ClusterConfig.setLocationConstraint(spec, sorter);
 
         /**
          * construct write file operator
@@ -292,7 +305,7 @@
         IRuntimeHookFactory preHookFactory = new RuntimeHookFactory(confFactory);
         IRecordDescriptorFactory inputRdFactory = DataflowUtils.getWritableRecordDescriptorFactoryFromWritableClasses(
                 vertexIdClass.getName(), vertexClass.getName());
-        FileWriteOperatorDescriptor writer = new FileWriteOperatorDescriptor(spec, inputRdFactory,
+        VertexWriteOperatorDescriptor writer = new VertexWriteOperatorDescriptor(spec, inputRdFactory,
                 resultFileSplitProvider, preHookFactory, null);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer, new String[] { "nc1" });
         PartitionConstraintHelper.addPartitionCountConstraint(spec, writer, 1);
@@ -357,7 +370,7 @@
         IRuntimeHookFactory preHookFactory = new RuntimeHookFactory(confFactory);
         IRecordDescriptorFactory inputRdFactory = DataflowUtils.getWritableRecordDescriptorFactoryFromWritableClasses(
                 vertexIdClass.getName(), vertexClass.getName());
-        FileWriteOperatorDescriptor writer = new FileWriteOperatorDescriptor(spec, inputRdFactory,
+        VertexWriteOperatorDescriptor writer = new VertexWriteOperatorDescriptor(spec, inputRdFactory,
                 resultFileSplitProvider, preHookFactory, null);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer, new String[] { "nc1" });
         PartitionConstraintHelper.addPartitionCountConstraint(spec, writer, 1);
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenInnerJoin.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenInnerJoin.java
index 00cdf07..9de4c04 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenInnerJoin.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenInnerJoin.java
@@ -39,6 +39,9 @@
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
 import edu.uci.ics.pregelix.api.graph.MsgList;
 import edu.uci.ics.pregelix.api.job.PregelixJob;
@@ -127,13 +130,16 @@
                 MsgList.class.getName());
         RecordDescriptor rdUnnestedMessage = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
                 vertexIdClass.getName(), messageValueClass.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
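+        // Insert tuples carry (vertexId, vertex); delete tuples carry only the
+        // vertexId, since the key alone identifies the B-tree entry to remove.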
 
         BTreeSearchFunctionUpdateOperatorDescriptor scanner = new BTreeSearchFunctionUpdateOperatorDescriptor(spec,
                 recordDescriptor, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
                 comparatorFactories, JobGenUtil.getForwardScan(iteration), null, null, true, true,
-                new BTreeDataflowHelperFactory(), inputRdFactory, 4,
+                new BTreeDataflowHelperFactory(), inputRdFactory, 6,
                 new StartComputeUpdateFunctionFactory(confFactory), preHookFactory, null, rdUnnestedMessage, rdDummy,
-                rdPartialAggregate, rdFinal);
+                rdPartialAggregate, rdInsert, rdDelete, rdFinal);
         ClusterConfig.setLocationConstraint(spec, scanner);
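+        // The scanner exposes six outputs: 0 unnested messages, 1 termination
+        // state, 2 partial aggregates, 3 vertex insertions, 4 vertex deletions,
+        // and 5 the bulk-load stream (see the connections below).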
 
         /**
@@ -173,7 +179,7 @@
         IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
         sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
                 .getClass());
-        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameSize, keyFields,
+        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
                 nkmFactory, sortCmpFactories, rdUnnestedMessage);
         ClusterConfig.setLocationConstraint(spec, localSort);
 
@@ -212,18 +218,54 @@
         EmptySinkOperatorDescriptor emptySink = new EmptySinkOperatorDescriptor(spec);
         ClusterConfig.setLocationConstraint(spec, emptySink);
 
+        /**
+         * add the insert operator to insert vertices
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertices
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
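+        // insertOp and deleteOp maintain the same primary B-tree (same file splits
+        // and comparators), differing only in IndexOp; each stream terminates in
+        // an empty sink so the job has a root for it.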
+
         ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
                 rdUnnestedMessage.getFields()[0]);
-        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+        ITuplePartitionComputerFactory unifyingPartitionComputerFactory = new MergePartitionComputerFactory();
+
         /** connect all operators **/
         spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, scanner, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, localSort, 0);
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 1,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), scanner, 1,
                 terminateWriter, 0);
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 2,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), scanner, 2,
                 finalAggregator, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 3, btreeBulkLoad, 0);
+
+        /**
+         * connect the insert/delete operators
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), scanner, 3, insertOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), scanner, 4, deleteOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), scanner, 5, btreeBulkLoad, 0);
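+        // All three index-maintenance streams (insert, delete, bulk load) are
+        // hash-partitioned on the vertex id, so every update is applied on the
+        // partition that owns that key.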
         spec.connect(new OneToOneConnectorDescriptor(spec), localSort, 0, localGby, 0);
         spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, partionFactory, keyFields, sortCmpFactories),
                 localGby, 0, globalGby, 0);
@@ -235,8 +277,10 @@
         spec.addRoot(btreeBulkLoad);
         spec.addRoot(terminateWriter);
         spec.addRoot(finalAggregator);
+        spec.addRoot(emptySink3);
+        spec.addRoot(emptySink4);
 
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy(spec));
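+        // ConnectorPolicyAssignmentPolicy takes the spec, presumably so it can
+        // inspect the job's connectors when choosing between pipelining and
+        // materialization (an assumption; the class body is outside this hunk).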
         spec.setFrameSize(frameSize);
         return spec;
     }
@@ -261,6 +305,9 @@
                 .getClass());
         RecordDescriptor rdFinal = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
                 MsgList.class.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
 
         /**
          * construct empty tuple operator
@@ -316,8 +363,8 @@
         IndexNestedLoopJoinFunctionUpdateOperatorDescriptor join = new IndexNestedLoopJoinFunctionUpdateOperatorDescriptor(
                 spec, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
                 comparatorFactories, JobGenUtil.getForwardScan(iteration), keyFields, keyFields, true, true,
-                new BTreeDataflowHelperFactory(), inputRdFactory, 4, new ComputeUpdateFunctionFactory(confFactory),
-                preHookFactory, null, rdUnnestedMessage, rdDummy, rdPartialAggregate, rdFinal);
+                new BTreeDataflowHelperFactory(), inputRdFactory, 6, new ComputeUpdateFunctionFactory(confFactory),
+                preHookFactory, null, rdUnnestedMessage, rdDummy, rdPartialAggregate, rdInsert, rdDelete, rdFinal);
         ClusterConfig.setLocationConstraint(spec, join);
 
         /**
@@ -342,7 +389,7 @@
         IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
         sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
                 .getClass());
-        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameSize, keyFields,
+        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
                 nkmFactory, sortCmpFactories, rdUnnestedMessage);
         ClusterConfig.setLocationConstraint(spec, localSort);
 
@@ -395,7 +442,33 @@
                 configurationFactory, aggRdFactory, jobId);
         PartitionConstraintHelper.addPartitionCountConstraint(spec, finalAggregator, 1);
 
-        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+        /**
+         * add the insert operator to insert vertices
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertices
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
+
+        ITuplePartitionComputerFactory unifyingPartitionComputerFactory = new MergePartitionComputerFactory();
         ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
                 rdUnnestedMessage.getFields()[0]);
         /** connect all operators **/
@@ -404,12 +477,20 @@
         spec.connect(new OneToOneConnectorDescriptor(spec), materializeRead, 0, setUnion, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), setUnion, 0, join, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), join, 0, localSort, 0);
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 1,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), join, 1,
                 terminateWriter, 0);
-
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 2,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), join, 2,
                 finalAggregator, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), join, 3, btreeBulkLoad, 0);
+
+        /**
+         * connect the insert/delete operators
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), join, 3, insertOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), join, 4, deleteOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), join, 5, btreeBulkLoad, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), localSort, 0, localGby, 0);
         spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, partionFactory, keyFields, sortCmpFactories),
                 localGby, 0, globalGby, 0);
@@ -420,8 +501,10 @@
         spec.addRoot(emptySink);
         spec.addRoot(btreeBulkLoad);
         spec.addRoot(terminateWriter);
+        spec.addRoot(emptySink3);
+        spec.addRoot(emptySink4);
 
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy(spec));
         spec.setFrameSize(frameSize);
         return spec;
     }
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoin.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoin.java
index 3847aa7..91c15b2 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoin.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoin.java
@@ -39,6 +39,9 @@
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
 import edu.uci.ics.pregelix.api.graph.MsgList;
 import edu.uci.ics.pregelix.api.job.PregelixJob;
@@ -121,13 +124,16 @@
                 vertexIdClass.getName(), vertexClass.getName());
         RecordDescriptor rdUnnestedMessage = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
                 vertexIdClass.getName(), messageValueClass.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
 
         BTreeSearchFunctionUpdateOperatorDescriptor scanner = new BTreeSearchFunctionUpdateOperatorDescriptor(spec,
                 recordDescriptor, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
                 comparatorFactories, JobGenUtil.getForwardScan(iteration), null, null, true, true,
-                new BTreeDataflowHelperFactory(), inputRdFactory, 3,
+                new BTreeDataflowHelperFactory(), inputRdFactory, 5,
                 new StartComputeUpdateFunctionFactory(confFactory), preHookFactory, null, rdUnnestedMessage, rdDummy,
-                rdPartialAggregate);
+                rdPartialAggregate, rdInsert, rdDelete);
         ClusterConfig.setLocationConstraint(spec, scanner);
 
         /**
@@ -139,14 +145,15 @@
         IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
         sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
                 .getClass());
-        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameSize, keyFields,
+        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
                 nkmFactory, sortCmpFactories, rdUnnestedMessage);
         ClusterConfig.setLocationConstraint(spec, localSort);
 
         /**
          * construct local pre-clustered group-by operator
          */
-        IAggregatorDescriptorFactory aggregatorFactory = DataflowUtils.getAccumulatingAggregatorFactory(conf, false, false);
+        IAggregatorDescriptorFactory aggregatorFactory = DataflowUtils.getAccumulatingAggregatorFactory(conf, false,
+                false);
         PreclusteredGroupOperatorDescriptor localGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
                 sortCmpFactories, aggregatorFactory, rdUnnestedMessage);
         ClusterConfig.setLocationConstraint(spec, localGby);
@@ -156,8 +163,8 @@
          */
         RecordDescriptor rdFinal = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
                 MsgList.class.getName());
-        IAggregatorDescriptorFactory aggregatorFactoryFinal = DataflowUtils
-                .getAccumulatingAggregatorFactory(conf, true, true);
+        IAggregatorDescriptorFactory aggregatorFactoryFinal = DataflowUtils.getAccumulatingAggregatorFactory(conf,
+                true, true);
         PreclusteredGroupOperatorDescriptor globalGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
                 sortCmpFactories, aggregatorFactoryFinal, rdFinal);
         ClusterConfig.setLocationConstraint(spec, globalGby);
@@ -182,7 +189,7 @@
         TerminationStateWriterOperatorDescriptor terminateWriter = new TerminationStateWriterOperatorDescriptor(spec,
                 configurationFactory, jobId);
         PartitionConstraintHelper.addPartitionCountConstraint(spec, terminateWriter, 1);
-        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+        ITuplePartitionComputerFactory unifyingPartitionComputerFactory = new MergePartitionComputerFactory();
 
         /**
          * final aggregate write operator
@@ -193,16 +200,55 @@
                 configurationFactory, aggRdFactory, jobId);
         PartitionConstraintHelper.addPartitionCountConstraint(spec, finalAggregator, 1);
 
+        /**
+         * add the insert operator to insert vertices
+         */
+        int[] fieldPermutation = new int[] { 0, 1 };
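+        // fieldPermutation maps input tuple fields into the index tuple in order:
+        // field 0 (the vertex id key) and field 1 (the vertex value), assuming the
+        // rdInsert layout declared above.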
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertices
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
+
         ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
                 rdUnnestedMessage.getFields()[0]);
         /** connect all operators **/
         spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, scanner, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, localSort, 0);
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 1,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), scanner, 1,
                 terminateWriter, 0);
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 2,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), scanner, 2,
                 finalAggregator, 0);
+
+        /**
+         * connect the insert/delete operators
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), scanner, 3, insertOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), scanner, 4, deleteOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+
+        /**
+         * connect the group-by operator
+         */
         spec.connect(new OneToOneConnectorDescriptor(spec), localSort, 0, localGby, 0);
         spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, partionFactory, keyFields, sortCmpFactories),
                 localGby, 0, globalGby, 0);
@@ -213,8 +259,10 @@
         spec.addRoot(terminateWriter);
         spec.addRoot(finalAggregator);
         spec.addRoot(emptySink2);
+        spec.addRoot(emptySink3);
+        spec.addRoot(emptySink4);
 
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy(spec));
         spec.setFrameSize(frameSize);
         return spec;
     }
@@ -239,6 +287,9 @@
                 .getClass());
         RecordDescriptor rdFinal = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
                 MsgList.class.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
 
         /**
          * construct empty tuple operator
@@ -286,9 +337,9 @@
         IndexNestedLoopJoinFunctionUpdateOperatorDescriptor join = new IndexNestedLoopJoinFunctionUpdateOperatorDescriptor(
                 spec, storageManagerInterface, treeRegistryProvider, fileSplitProvider, interiorFrameFactory,
                 leafFrameFactory, typeTraits, comparatorFactories, JobGenUtil.getForwardScan(iteration), keyFields,
-                keyFields, true, true, new BTreeDataflowHelperFactory(), true, nullWriterFactories, inputRdFactory, 3,
+                keyFields, true, true, new BTreeDataflowHelperFactory(), true, nullWriterFactories, inputRdFactory, 5,
                 new ComputeUpdateFunctionFactory(confFactory), preHookFactory, null, rdUnnestedMessage, rdDummy,
-                rdPartialAggregate);
+                rdPartialAggregate, rdInsert, rdDelete);
         ClusterConfig.setLocationConstraint(spec, join);
 
         /**
@@ -299,14 +350,15 @@
         IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
         sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
                 .getClass());
-        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameSize, keyFields,
+        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
                 nkmFactory, sortCmpFactories, rdUnnestedMessage);
         ClusterConfig.setLocationConstraint(spec, localSort);
 
         /**
          * construct local pre-clustered group-by operator
          */
-        IAggregatorDescriptorFactory aggregatorFactory = DataflowUtils.getAccumulatingAggregatorFactory(conf, false, false);
+        IAggregatorDescriptorFactory aggregatorFactory = DataflowUtils.getAccumulatingAggregatorFactory(conf, false,
+                false);
         PreclusteredGroupOperatorDescriptor localGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
                 sortCmpFactories, aggregatorFactory, rdUnnestedMessage);
         ClusterConfig.setLocationConstraint(spec, localGby);
@@ -314,8 +366,8 @@
         /**
          * construct global group-by operator
          */
-        IAggregatorDescriptorFactory aggregatorFactoryFinal = DataflowUtils
-                .getAccumulatingAggregatorFactory(conf, true, true);
+        IAggregatorDescriptorFactory aggregatorFactoryFinal = DataflowUtils.getAccumulatingAggregatorFactory(conf,
+                true, true);
         PreclusteredGroupOperatorDescriptor globalGby = new PreclusteredGroupOperatorDescriptor(spec, keyFields,
                 sortCmpFactories, aggregatorFactoryFinal, rdFinal);
         ClusterConfig.setLocationConstraint(spec, globalGby);
@@ -351,7 +403,34 @@
                 configurationFactory, aggRdFactory, jobId);
         PartitionConstraintHelper.addPartitionCountConstraint(spec, finalAggregator, 1);
 
-        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+        /**
+         * add the insert operator to insert vertices
+         */
+        int[] fieldPermutation = new int[] { 0, 1 };
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertices
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
+
+        ITuplePartitionComputerFactory unifyingPartitionComputerFactory = new MergePartitionComputerFactory();
         ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
                 rdUnnestedMessage.getFields()[0]);
 
@@ -360,10 +439,19 @@
         spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, materializeRead, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), materializeRead, 0, join, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), join, 0, localSort, 0);
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 1,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), join, 1,
                 terminateWriter, 0);
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 2,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), join, 2,
                 finalAggregator, 0);
+
+        /**
+         * connect the insert/delete operators
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), join, 3, insertOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), join, 4, deleteOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+
         spec.connect(new OneToOneConnectorDescriptor(spec), localSort, 0, localGby, 0);
         spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, partionFactory, keyFields, sortCmpFactories),
                 localGby, 0, globalGby, 0);
@@ -375,7 +463,7 @@
         spec.addRoot(finalAggregator);
         spec.addRoot(emptySink);
 
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy(spec));
         spec.setFrameSize(frameSize);
         return spec;
     }
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSingleSort.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSingleSort.java
index ec783a7..ee1fd0f 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSingleSort.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSingleSort.java
@@ -38,6 +38,9 @@
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
 import edu.uci.ics.pregelix.api.graph.MsgList;
 import edu.uci.ics.pregelix.api.job.PregelixJob;
@@ -47,12 +50,12 @@
 import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
 import edu.uci.ics.pregelix.core.runtime.touchpoint.WritableComparingBinaryComparatorFactory;
 import edu.uci.ics.pregelix.core.util.DataflowUtils;
+import edu.uci.ics.pregelix.dataflow.ConnectorPolicyAssignmentPolicy;
 import edu.uci.ics.pregelix.dataflow.EmptySinkOperatorDescriptor;
 import edu.uci.ics.pregelix.dataflow.EmptyTupleSourceOperatorDescriptor;
 import edu.uci.ics.pregelix.dataflow.FinalAggregateOperatorDescriptor;
 import edu.uci.ics.pregelix.dataflow.MaterializingReadOperatorDescriptor;
 import edu.uci.ics.pregelix.dataflow.MaterializingWriteOperatorDescriptor;
-import edu.uci.ics.pregelix.dataflow.NonCombinerConnectorPolicyAssignmentPolicy;
 import edu.uci.ics.pregelix.dataflow.TerminationStateWriterOperatorDescriptor;
 import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
 import edu.uci.ics.pregelix.dataflow.std.BTreeSearchFunctionUpdateOperatorDescriptor;
@@ -123,12 +126,16 @@
                 vertexIdClass.getName(), vertexClass.getName());
         RecordDescriptor rdUnnestedMessage = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
                 vertexIdClass.getName(), messageValueClass.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
+
         BTreeSearchFunctionUpdateOperatorDescriptor scanner = new BTreeSearchFunctionUpdateOperatorDescriptor(spec,
                 recordDescriptor, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
                 comparatorFactories, JobGenUtil.getForwardScan(iteration), null, null, true, true,
-                new BTreeDataflowHelperFactory(), inputRdFactory, 3,
+                new BTreeDataflowHelperFactory(), inputRdFactory, 5,
                 new StartComputeUpdateFunctionFactory(confFactory), preHookFactory, null, rdUnnestedMessage, rdDummy,
-                rdPartialAggregate);
+                rdPartialAggregate, rdInsert, rdDelete);
         ClusterConfig.setLocationConstraint(spec, scanner);
 
         /**
@@ -140,7 +147,7 @@
         IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
         sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
                 .getClass());
-        ExternalSortOperatorDescriptor globalSort = new ExternalSortOperatorDescriptor(spec, maxFrameSize, keyFields,
+        ExternalSortOperatorDescriptor globalSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
                 nkmFactory, sortCmpFactories, rdUnnestedMessage);
         ClusterConfig.setLocationConstraint(spec, globalSort);
 
@@ -185,17 +192,53 @@
                 configurationFactory, aggRdFactory, jobId);
         PartitionConstraintHelper.addPartitionCountConstraint(spec, finalAggregator, 1);
 
-        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+        /**
+         * add the insert operator to insert vertices
+         */
+        int[] fieldPermutation = new int[] { 0, 1 };
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertices
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
+
+        ITuplePartitionComputerFactory unifyingPartitionComputerFactory = new MergePartitionComputerFactory();
         ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
                 rdUnnestedMessage.getFields()[0]);
         /** connect all operators **/
         spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, scanner, 0);
         spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), scanner, 0, globalSort, 0);
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 1,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), scanner, 1,
                 terminateWriter, 0);
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 2,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), scanner, 2,
                 finalAggregator, 0);
+
+        /**
+         * connect the insert/delete operators
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), scanner, 3, insertOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), scanner, 4, deleteOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+
         spec.connect(new OneToOneConnectorDescriptor(spec), globalSort, 0, globalGby, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), globalGby, 0, materialize, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), materialize, 0, postSuperStep, 0);
@@ -204,8 +247,10 @@
         spec.addRoot(terminateWriter);
         spec.addRoot(finalAggregator);
         spec.addRoot(emptySink2);
+        spec.addRoot(emptySink3);
+        spec.addRoot(emptySink4);
 
-        spec.setConnectorPolicyAssignmentPolicy(new NonCombinerConnectorPolicyAssignmentPolicy());
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy(spec));
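+        // The single-sort plan drops its special-case NonCombiner policy in favor
+        // of the same spec-aware connector policy used by the other plans.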
         spec.setFrameSize(frameSize);
         return spec;
     }
@@ -230,6 +275,9 @@
                 .getClass());
         RecordDescriptor rdFinal = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
                 MsgList.class.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
 
         /**
          * construct empty tuple operator
@@ -277,9 +325,9 @@
         IndexNestedLoopJoinFunctionUpdateOperatorDescriptor join = new IndexNestedLoopJoinFunctionUpdateOperatorDescriptor(
                 spec, storageManagerInterface, treeRegistryProvider, fileSplitProvider, interiorFrameFactory,
                 leafFrameFactory, typeTraits, comparatorFactories, JobGenUtil.getForwardScan(iteration), keyFields,
-                keyFields, true, true, new BTreeDataflowHelperFactory(), true, nullWriterFactories, inputRdFactory, 3,
+                keyFields, true, true, new BTreeDataflowHelperFactory(), true, nullWriterFactories, inputRdFactory, 5,
                 new ComputeUpdateFunctionFactory(confFactory), preHookFactory, null, rdUnnestedMessage, rdDummy,
-                rdPartialAggregate);
+                rdPartialAggregate, rdInsert, rdDelete);
         ClusterConfig.setLocationConstraint(spec, join);
 
         /**
@@ -290,7 +338,7 @@
         IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
         sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
                 .getClass());
-        ExternalSortOperatorDescriptor globalSort = new ExternalSortOperatorDescriptor(spec, maxFrameSize, keyFields,
+        ExternalSortOperatorDescriptor globalSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
                 nkmFactory, sortCmpFactories, rdUnnestedMessage);
         ClusterConfig.setLocationConstraint(spec, globalSort);
 
@@ -334,7 +382,31 @@
                 configurationFactory, aggRdFactory, jobId);
         PartitionConstraintHelper.addPartitionCountConstraint(spec, finalAggregator, 1);
 
-        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+        /**
+         * add the insert operator to insert vertices
+         */
+        int[] fieldPermutation = new int[] { 0, 1 };
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertices
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
+
+        ITuplePartitionComputerFactory unifyingPartitionComputerFactory = new MergePartitionComputerFactory();
         ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
                 rdUnnestedMessage.getFields()[0]);
 
@@ -343,10 +415,18 @@
         spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, materializeRead, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), materializeRead, 0, join, 0);
         spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), join, 0, globalSort, 0);
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 1,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), join, 1,
                 terminateWriter, 0);
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 2,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), join, 2,
                 finalAggregator, 0);
+        /**
+         * connect the insert/delete operators
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), join, 3, insertOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), join, 4, deleteOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+
         spec.connect(new OneToOneConnectorDescriptor(spec), globalSort, 0, globalGby, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), globalGby, 0, materialize, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), materialize, 0, postSuperStep, 0);
@@ -356,7 +436,7 @@
         spec.addRoot(finalAggregator);
         spec.addRoot(emptySink);
 
-        spec.setConnectorPolicyAssignmentPolicy(new NonCombinerConnectorPolicyAssignmentPolicy());
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy(spec));
         spec.setFrameSize(frameSize);
         return spec;
     }
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSort.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSort.java
index bb939e3..628e9ce 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSort.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSort.java
@@ -38,6 +38,9 @@
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
 import edu.uci.ics.pregelix.api.graph.MsgList;
 import edu.uci.ics.pregelix.api.job.PregelixJob;
@@ -47,6 +50,7 @@
 import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
 import edu.uci.ics.pregelix.core.runtime.touchpoint.WritableComparingBinaryComparatorFactory;
 import edu.uci.ics.pregelix.core.util.DataflowUtils;
+import edu.uci.ics.pregelix.dataflow.ConnectorPolicyAssignmentPolicy;
 import edu.uci.ics.pregelix.dataflow.EmptySinkOperatorDescriptor;
 import edu.uci.ics.pregelix.dataflow.EmptyTupleSourceOperatorDescriptor;
 import edu.uci.ics.pregelix.dataflow.FinalAggregateOperatorDescriptor;
@@ -119,12 +123,16 @@
                 vertexIdClass.getName(), vertexClass.getName());
         RecordDescriptor rdUnnestedMessage = DataflowUtils.getRecordDescriptorFromKeyValueClasses(
                 vertexIdClass.getName(), messageValueClass.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
+
         BTreeSearchFunctionUpdateOperatorDescriptor scanner = new BTreeSearchFunctionUpdateOperatorDescriptor(spec,
                 recordDescriptor, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
                 comparatorFactories, JobGenUtil.getForwardScan(iteration), null, null, true, true,
-                new BTreeDataflowHelperFactory(), inputRdFactory, 3,
+                new BTreeDataflowHelperFactory(), inputRdFactory, 5,
                 new StartComputeUpdateFunctionFactory(confFactory), preHookFactory, null, rdUnnestedMessage, rdDummy,
-                rdPartialAggregate);
+                rdPartialAggregate, rdInsert, rdDelete);
         ClusterConfig.setLocationConstraint(spec, scanner);
 
         /**
@@ -136,7 +144,7 @@
         IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
         sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
                 .getClass());
-        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameSize, keyFields,
+        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
                 nkmFactory, sortCmpFactories, rdUnnestedMessage);
         ClusterConfig.setLocationConstraint(spec, localSort);
 
@@ -152,7 +160,7 @@
         /**
          * construct global sort operator
          */
-        ExternalSortOperatorDescriptor globalSort = new ExternalSortOperatorDescriptor(spec, maxFrameSize, keyFields,
+        ExternalSortOperatorDescriptor globalSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
                 nkmFactory, sortCmpFactories, rdUnnestedMessage);
         ClusterConfig.setLocationConstraint(spec, globalSort);
 
@@ -187,7 +195,7 @@
         TerminationStateWriterOperatorDescriptor terminateWriter = new TerminationStateWriterOperatorDescriptor(spec,
                 configurationFactory, jobId);
         PartitionConstraintHelper.addPartitionCountConstraint(spec, terminateWriter, 1);
-        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+        ITuplePartitionComputerFactory unifyingPartitionComputerFactory = new MergePartitionComputerFactory();
 
         /**
          * final aggregate write operator
@@ -198,16 +206,51 @@
                 configurationFactory, aggRdFactory, jobId);
         PartitionConstraintHelper.addPartitionCountConstraint(spec, finalAggregator, 1);
 
+        /**
+         * add the insert operator to insert vertices
+         */
+        int[] fieldPermutation = new int[] { 0, 1 };
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertices
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
+
         ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
                 rdUnnestedMessage.getFields()[0]);
         /** connect all operators **/
         spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, scanner, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, localSort, 0);
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 1,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), scanner, 1,
                 terminateWriter, 0);
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 2,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), scanner, 2,
                 finalAggregator, 0);
+        /**
+         * connect the insert/delete operators
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), scanner, 3, insertOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), scanner, 4, deleteOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+
         spec.connect(new OneToOneConnectorDescriptor(spec), localSort, 0, localGby, 0);
         spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), localGby, 0, globalSort, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), globalSort, 0, globalGby, 0);
@@ -218,8 +261,11 @@
         spec.addRoot(terminateWriter);
         spec.addRoot(finalAggregator);
         spec.addRoot(emptySink2);
+        spec.addRoot(emptySink3);
+        spec.addRoot(emptySink4);
 
         spec.setFrameSize(frameSize);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy(spec));
         return spec;
     }
 
@@ -243,6 +289,9 @@
                 .getClass());
         RecordDescriptor rdFinal = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
                 MsgList.class.getName());
+        RecordDescriptor rdInsert = DataflowUtils.getRecordDescriptorFromKeyValueClasses(vertexIdClass.getName(),
+                vertexClass.getName());
+        RecordDescriptor rdDelete = DataflowUtils.getRecordDescriptorFromWritableClasses(vertexIdClass.getName());
 
         /**
          * construct empty tuple operator
@@ -290,9 +339,9 @@
         IndexNestedLoopJoinFunctionUpdateOperatorDescriptor join = new IndexNestedLoopJoinFunctionUpdateOperatorDescriptor(
                 spec, storageManagerInterface, treeRegistryProvider, fileSplitProvider, interiorFrameFactory,
                 leafFrameFactory, typeTraits, comparatorFactories, JobGenUtil.getForwardScan(iteration), keyFields,
-                keyFields, true, true, new BTreeDataflowHelperFactory(), true, nullWriterFactories, inputRdFactory, 3,
+                keyFields, true, true, new BTreeDataflowHelperFactory(), true, nullWriterFactories, inputRdFactory, 5,
                 new ComputeUpdateFunctionFactory(confFactory), preHookFactory, null, rdUnnestedMessage, rdDummy,
-                rdPartialAggregate);
+                rdPartialAggregate, rdInsert, rdDelete);
         ClusterConfig.setLocationConstraint(spec, join);
 
         /**
@@ -303,7 +352,7 @@
         IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
         sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
                 .getClass());
-        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameSize, keyFields,
+        ExternalSortOperatorDescriptor localSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
                 nkmFactory, sortCmpFactories, rdUnnestedMessage);
         ClusterConfig.setLocationConstraint(spec, localSort);
 
@@ -319,7 +368,7 @@
         /**
          * construct global sort operator
          */
-        ExternalSortOperatorDescriptor globalSort = new ExternalSortOperatorDescriptor(spec, maxFrameSize, keyFields,
+        ExternalSortOperatorDescriptor globalSort = new ExternalSortOperatorDescriptor(spec, maxFrameNumber, keyFields,
                 nkmFactory, sortCmpFactories, rdUnnestedMessage);
         ClusterConfig.setLocationConstraint(spec, globalSort);
 
@@ -363,7 +412,34 @@
                 configurationFactory, aggRdFactory, jobId);
         PartitionConstraintHelper.addPartitionCountConstraint(spec, finalAggregator, 1);
 
-        ITuplePartitionComputerFactory hashPartitionComputerFactory = new MergePartitionComputerFactory();
+        /**
+         * add the insert operator to insert vertices
+         */
+        int[] fieldPermutation = new int[] { 0, 1 };
+        TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, insertOp);
+
+        /**
+         * add the delete operator to delete vertices
+         */
+        TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, rdDelete, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
+                comparatorFactories, fieldPermutation, IndexOp.DELETE, new BTreeDataflowHelperFactory(), null,
+                NoOpOperationCallbackProvider.INSTANCE);
+        ClusterConfig.setLocationConstraint(spec, deleteOp);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink3);
+
+        /** construct empty sink operator */
+        EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
+        ClusterConfig.setLocationConstraint(spec, emptySink4);
+
+        ITuplePartitionComputerFactory unifyingPartitionComputerFactory = new MergePartitionComputerFactory();
         ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
                 rdUnnestedMessage.getFields()[0]);
 
@@ -372,10 +448,18 @@
         spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, materializeRead, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), materializeRead, 0, join, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), join, 0, localSort, 0);
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 1,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), join, 1,
                 terminateWriter, 0);
-        spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), join, 2,
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, unifyingPartitionComputerFactory), join, 2,
                 finalAggregator, 0);
+        /**
+         * connect the insert/delete operators
+         */
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), join, 3, insertOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, emptySink3, 0);
+        spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), join, 4, deleteOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), deleteOp, 0, emptySink4, 0);
+
         spec.connect(new OneToOneConnectorDescriptor(spec), localSort, 0, localGby, 0);
         spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), localGby, 0, globalSort, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), globalSort, 0, globalGby, 0);
@@ -386,8 +470,11 @@
         spec.addRoot(terminateWriter);
         spec.addRoot(finalAggregator);
         spec.addRoot(emptySink);
+        spec.addRoot(emptySink3);
+        spec.addRoot(emptySink4);
 
         spec.setFrameSize(frameSize);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy(spec));
         return spec;
     }
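
Note on the wiring added above: both job-generation methods now follow the same pattern, in which outputs 3 and 4 of the update function carry vertex insertions and deletions to TreeIndexInsertUpdateDeleteOperatorDescriptor instances, each drained by an EmptySinkOperatorDescriptor that is registered as a job root. Distilled to a single side output, the pattern looks like this (a sketch reusing only names constructed in the diff above; nothing new is introduced):

    // Route side output 3 (inserted vertices) into a B-tree INSERT, partitioned
    // by vertex id, and drain it with an empty sink that roots the job.
    TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
            spec, rdInsert, storageManagerInterface, treeRegistryProvider, fileSplitProvider, typeTraits,
            comparatorFactories, new int[] { 0, 1 }, IndexOp.INSERT, new BTreeDataflowHelperFactory(), null,
            NoOpOperationCallbackProvider.INSTANCE);
    EmptySinkOperatorDescriptor sink = new EmptySinkOperatorDescriptor(spec);
    spec.connect(new MToNPartitioningConnectorDescriptor(spec, partionFactory), join, 3, insertOp, 0);
    spec.connect(new OneToOneConnectorDescriptor(spec), insertOp, 0, sink, 0);
    spec.addRoot(sink);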
 
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/clusterconfig/ClusterConfig.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/clusterconfig/ClusterConfig.java
index 8eadab9..d26e637 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/clusterconfig/ClusterConfig.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/clusterconfig/ClusterConfig.java
@@ -40,6 +40,7 @@
 import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.hdfs2.scheduler.Scheduler;
 
 public class ClusterConfig {
 
@@ -49,6 +50,7 @@
     private static Properties clusterProperties = new Properties();
     private static Map<String, List<String>> ipToNcMapping;
     private static String[] stores;
+    private static Scheduler hdfsScheduler;
 
     /**
      * let tests set config path to be whatever
@@ -211,6 +213,8 @@
                 NCs[i] = entry.getKey();
                 i++;
             }
+
+            hdfsScheduler = new Scheduler(ipAddress, port);
         } catch (Exception e) {
             throw new IllegalStateException(e);
         }
@@ -218,4 +222,8 @@
         loadClusterProperties();
         loadStores();
     }
+
+    public static Scheduler getHdfsScheduler() {
+        return hdfsScheduler;
+    }
 }
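
ClusterConfig now builds a single hyracks-hdfs2 Scheduler when the cluster state is loaded and exposes it through getHdfsScheduler(). A sketch of the intended use in a job generator, assuming the Scheduler exposes getLocationConstraints(List&lt;InputSplit&gt;) as in hyracks-hdfs-core (that API is not shown in this diff), and assuming hypothetical 'inputFormat', 'job', 'scanner', and 'spec' objects from the surrounding code:

    // Hypothetical fragment: derive data-local node assignments for an HDFS
    // scan from its splits, then pin the scan operator to those nodes.
    List<InputSplit> splits = inputFormat.getSplits(job); // mapreduce API
    String[] locations = ClusterConfig.getHdfsScheduler().getLocationConstraints(splits);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanner, locations);
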
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/PregelixHyracksIntegrationUtil.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/PregelixHyracksIntegrationUtil.java
index ed04746..cd2a864 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/PregelixHyracksIntegrationUtil.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/PregelixHyracksIntegrationUtil.java
@@ -14,6 +14,7 @@
  */
 package edu.uci.ics.pregelix.core.util;
 
+import java.io.File;
 import java.util.EnumSet;
 
 import edu.uci.ics.hyracks.api.client.HyracksConnection;
@@ -44,14 +45,16 @@
     private static NodeControllerService nc2;
     private static IHyracksClientConnection hcc;
 
-    public static void init() throws Exception {
+    public static void init(String topologyFilePath) throws Exception {
         CCConfig ccConfig = new CCConfig();
         ccConfig.clientNetIpAddress = CC_HOST;
         ccConfig.clusterNetIpAddress = CC_HOST;
         ccConfig.clusterNetPort = TEST_HYRACKS_CC_PORT;
         ccConfig.clientNetPort = TEST_HYRACKS_CC_CLIENT_PORT;
         ccConfig.defaultMaxJobAttempts = 0;
-        ccConfig.jobHistorySize = 10;
+        ccConfig.jobHistorySize = 0;
+        ccConfig.profileDumpPeriod = -1;
+        ccConfig.clusterTopologyDefinition = new File(topologyFilePath);
 
         // cluster controller
         cc = new ClusterControllerService(ccConfig);
@@ -63,6 +66,7 @@
         ncConfig1.clusterNetIPAddress = "localhost";
         ncConfig1.ccPort = TEST_HYRACKS_CC_PORT;
         ncConfig1.dataIPAddress = "127.0.0.1";
+        ncConfig1.datasetIPAddress = "127.0.0.1";
         ncConfig1.nodeId = NC1_ID;
         nc1 = new NodeControllerService(ncConfig1);
         nc1.start();
@@ -72,6 +76,7 @@
         ncConfig2.clusterNetIPAddress = "localhost";
         ncConfig2.ccPort = TEST_HYRACKS_CC_PORT;
         ncConfig2.dataIPAddress = "127.0.0.1";
+        ncConfig2.datasetIPAddress = "127.0.0.1";
         ncConfig2.nodeId = NC2_ID;
         nc2 = new NodeControllerService(ncConfig2);
         nc2.start();
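
With this change, init(...) takes the path to a cluster-topology XML file and turns off job history and profile dumps for the test cluster. Callers pass the checked-in localhost topology; the JoinTest hunk below makes exactly this call:

    // Bring up the two-NC mini cluster with the bundled test topology
    // before submitting jobs.
    PregelixHyracksIntegrationUtil.init("src/test/resources/topology.xml");
    PregelixHyracksIntegrationUtil.createApp(HYRACKS_APP_NAME);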
diff --git a/pregelix/pregelix-core/src/main/java/org/apache/hadoop/fs/Path.java b/pregelix/pregelix-core/src/main/java/org/apache/hadoop/fs/Path.java
deleted file mode 100644
index 5efdde8..0000000
--- a/pregelix/pregelix-core/src/main/java/org/apache/hadoop/fs/Path.java
+++ /dev/null
@@ -1,355 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.net.URI;
-import java.net.URISyntaxException;
-
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * Names a file or directory in a {@link FileSystem}. Path strings use slash as
- * the directory separator. A path string is absolute if it begins with a slash.
- */
-@SuppressWarnings("rawtypes")
-public class Path implements Comparable, Serializable {
-    private static final long serialVersionUID = 1L;
-    /** The directory separator, a slash. */
-    public static final String SEPARATOR = "/";
-    public static final char SEPARATOR_CHAR = '/';
-
-    public static final String CUR_DIR = ".";
-
-    static final boolean WINDOWS = System.getProperty("os.name").startsWith("Windows");
-
-    private URI uri; // a hierarchical uri
-
-    /** Resolve a child path against a parent path. */
-    public Path(String parent, String child) {
-        this(new Path(parent), new Path(child));
-    }
-
-    /** Resolve a child path against a parent path. */
-    public Path(Path parent, String child) {
-        this(parent, new Path(child));
-    }
-
-    /** Resolve a child path against a parent path. */
-    public Path(String parent, Path child) {
-        this(new Path(parent), child);
-    }
-
-    /** Resolve a child path against a parent path. */
-    public Path(Path parent, Path child) {
-        // Add a slash to parent's path so resolution is compatible with URI's
-        URI parentUri = parent.uri;
-        String parentPath = parentUri.getPath();
-        if (!(parentPath.equals("/") || parentPath.equals("")))
-            try {
-                parentUri = new URI(parentUri.getScheme(), parentUri.getAuthority(), parentUri.getPath() + "/", null,
-                        parentUri.getFragment());
-            } catch (URISyntaxException e) {
-                throw new IllegalArgumentException(e);
-            }
-        URI resolved = parentUri.resolve(child.uri);
-        initialize(resolved.getScheme(), resolved.getAuthority(), normalizePath(resolved.getPath()),
-                resolved.getFragment());
-    }
-
-    private void checkPathArg(String path) {
-        // disallow construction of a Path from an empty string
-        if (path == null) {
-            throw new IllegalArgumentException("Can not create a Path from a null string");
-        }
-        if (path.length() == 0) {
-            throw new IllegalArgumentException("Can not create a Path from an empty string");
-        }
-    }
-
-    /**
-     * Construct a path from a String. Path strings are URIs, but with unescaped
-     * elements and some additional normalization.
-     */
-    public Path(String pathString) {
-        checkPathArg(pathString);
-
-        // We can't use 'new URI(String)' directly, since it assumes things are
-        // escaped, which we don't require of Paths.
-
-        // add a slash in front of paths with Windows drive letters
-        if (hasWindowsDrive(pathString, false))
-            pathString = "/" + pathString;
-
-        // parse uri components
-        String scheme = null;
-        String authority = null;
-
-        int start = 0;
-
-        // parse uri scheme, if any
-        int colon = pathString.indexOf(':');
-        int slash = pathString.indexOf('/');
-        if ((colon != -1) && ((slash == -1) || (colon < slash))) { // has a
-                                                                   // scheme
-            scheme = pathString.substring(0, colon);
-            start = colon + 1;
-        }
-
-        // parse uri authority, if any
-        if (pathString.startsWith("//", start) && (pathString.length() - start > 2)) { // has
-                                                                                       // authority
-            int nextSlash = pathString.indexOf('/', start + 2);
-            int authEnd = nextSlash > 0 ? nextSlash : pathString.length();
-            authority = pathString.substring(start + 2, authEnd);
-            start = authEnd;
-        }
-
-        // uri path is the rest of the string -- query & fragment not supported
-        String path = pathString.substring(start, pathString.length());
-
-        initialize(scheme, authority, path, null);
-    }
-
-    /** Construct a Path from components. */
-    public Path(String scheme, String authority, String path) {
-        checkPathArg(path);
-        initialize(scheme, authority, path, null);
-    }
-
-    /**
-     * Construct a path from a URI
-     */
-    public Path(URI aUri) {
-        uri = aUri;
-    }
-
-    private void initialize(String scheme, String authority, String path, String fragment) {
-        try {
-            this.uri = new URI(scheme, authority, normalizePath(path), null, fragment).normalize();
-        } catch (URISyntaxException e) {
-            throw new IllegalArgumentException(e);
-        }
-    }
-
-    private String normalizePath(String path) {
-        // remove double slashes & backslashes
-        if (path.indexOf("//") != -1) {
-            path = path.replace("//", "/");
-        }
-        if (path.indexOf("\\") != -1) {
-            path = path.replace("\\", "/");
-        }
-
-        // trim trailing slash from non-root path (ignoring windows drive)
-        int minLength = hasWindowsDrive(path, true) ? 4 : 1;
-        if (path.length() > minLength && path.endsWith("/")) {
-            path = path.substring(0, path.length() - 1);
-        }
-
-        return path;
-    }
-
-    private boolean hasWindowsDrive(String path, boolean slashed) {
-        if (!WINDOWS)
-            return false;
-        int start = slashed ? 1 : 0;
-        return path.length() >= start + 2
-                && (slashed ? path.charAt(0) == '/' : true)
-                && path.charAt(start + 1) == ':'
-                && ((path.charAt(start) >= 'A' && path.charAt(start) <= 'Z') || (path.charAt(start) >= 'a' && path
-                        .charAt(start) <= 'z'));
-    }
-
-    /** Convert this to a URI. */
-    public URI toUri() {
-        return uri;
-    }
-
-    /** Return the FileSystem that owns this Path. */
-    public FileSystem getFileSystem(Configuration conf) throws IOException {
-        return FileSystem.get(this.toUri(), conf);
-    }
-
-    /** True if the directory of this path is absolute. */
-    public boolean isAbsolute() {
-        int start = hasWindowsDrive(uri.getPath(), true) ? 3 : 0;
-        return uri.getPath().startsWith(SEPARATOR, start);
-    }
-
-    /** Returns the final component of this path. */
-    public String getName() {
-        String path = uri.getPath();
-        int slash = path.lastIndexOf(SEPARATOR);
-        return path.substring(slash + 1);
-    }
-
-    /** Returns the parent of a path or null if at root. */
-    public Path getParent() {
-        String path = uri.getPath();
-        int lastSlash = path.lastIndexOf('/');
-        int start = hasWindowsDrive(path, true) ? 3 : 0;
-        if ((path.length() == start) || // empty path
-                (lastSlash == start && path.length() == start + 1)) { // at root
-            return null;
-        }
-        String parent;
-        if (lastSlash == -1) {
-            parent = CUR_DIR;
-        } else {
-            int end = hasWindowsDrive(path, true) ? 3 : 0;
-            parent = path.substring(0, lastSlash == end ? end + 1 : lastSlash);
-        }
-        return new Path(uri.getScheme(), uri.getAuthority(), parent);
-    }
-
-    /** Adds a suffix to the final name in the path. */
-    public Path suffix(String suffix) {
-        return new Path(getParent(), getName() + suffix);
-    }
-
-    public String toString() {
-        // we can't use uri.toString(), which escapes everything, because we
-        // want
-        // illegal characters unescaped in the string, for glob processing, etc.
-        StringBuffer buffer = new StringBuffer();
-        if (uri.getScheme() != null) {
-            buffer.append(uri.getScheme());
-            buffer.append(":");
-        }
-        if (uri.getAuthority() != null) {
-            buffer.append("//");
-            buffer.append(uri.getAuthority());
-        }
-        if (uri.getPath() != null) {
-            String path = uri.getPath();
-            if (path.indexOf('/') == 0 && hasWindowsDrive(path, true) && // has
-                                                                         // windows
-                                                                         // drive
-                    uri.getScheme() == null && // but no scheme
-                    uri.getAuthority() == null) // or authority
-                path = path.substring(1); // remove slash before drive
-            buffer.append(path);
-        }
-        if (uri.getFragment() != null) {
-            buffer.append("#");
-            buffer.append(uri.getFragment());
-        }
-        return buffer.toString();
-    }
-
-    public boolean equals(Object o) {
-        if (!(o instanceof Path)) {
-            return false;
-        }
-        Path that = (Path) o;
-        return this.uri.equals(that.uri);
-    }
-
-    public int hashCode() {
-        return uri.hashCode();
-    }
-
-    public int compareTo(Object o) {
-        Path that = (Path) o;
-        return this.uri.compareTo(that.uri);
-    }
-
-    /** Return the number of elements in this path. */
-    public int depth() {
-        String path = uri.getPath();
-        int depth = 0;
-        int slash = path.length() == 1 && path.charAt(0) == '/' ? -1 : 0;
-        while (slash != -1) {
-            depth++;
-            slash = path.indexOf(SEPARATOR, slash + 1);
-        }
-        return depth;
-    }
-
-    /** Returns a qualified path object. */
-    public Path makeQualified(FileSystem fs) {
-        Path path = this;
-        if (!isAbsolute()) {
-            path = new Path(fs.getWorkingDirectory(), this);
-        }
-
-        URI pathUri = path.toUri();
-        URI fsUri = fs.getUri();
-
-        String scheme = pathUri.getScheme();
-        String authority = pathUri.getAuthority();
-        String fragment = pathUri.getFragment();
-        if (scheme != null && (authority != null || fsUri.getAuthority() == null))
-            return path;
-
-        if (scheme == null) {
-            scheme = fsUri.getScheme();
-        }
-
-        if (authority == null) {
-            authority = fsUri.getAuthority();
-            if (authority == null) {
-                authority = "";
-            }
-        }
-
-        URI newUri = null;
-        try {
-            newUri = new URI(scheme, authority, normalizePath(pathUri.getPath()), null, fragment);
-        } catch (URISyntaxException e) {
-            throw new IllegalArgumentException(e);
-        }
-        return new Path(newUri);
-    }
-
-    /** Returns a qualified path object. */
-    public Path makeQualified(URI defaultUri, Path workingDir) {
-        Path path = this;
-        if (!isAbsolute()) {
-            path = new Path(workingDir, this);
-        }
-
-        URI pathUri = path.toUri();
-
-        String scheme = pathUri.getScheme();
-        String authority = pathUri.getAuthority();
-        String fragment = pathUri.getFragment();
-
-        if (scheme != null && (authority != null || defaultUri.getAuthority() == null))
-            return path;
-
-        if (scheme == null) {
-            scheme = defaultUri.getScheme();
-        }
-
-        if (authority == null) {
-            authority = defaultUri.getAuthority();
-            if (authority == null) {
-                authority = "";
-            }
-        }
-
-        URI newUri = null;
-        try {
-            newUri = new URI(scheme, authority, normalizePath(pathUri.getPath()), null, fragment);
-        } catch (URISyntaxException e) {
-            throw new IllegalArgumentException(e);
-        }
-        return new Path(newUri);
-    }
-}
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/main/java/org/apache/hadoop/mapreduce/InputSplit.java b/pregelix/pregelix-core/src/main/java/org/apache/hadoop/mapreduce/InputSplit.java
deleted file mode 100644
index ac72160..0000000
--- a/pregelix/pregelix-core/src/main/java/org/apache/hadoop/mapreduce/InputSplit.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.mapreduce;
-
-import java.io.IOException;
-import java.io.Serializable;
-
-/**
- * <code>InputSplit</code> represents the data to be processed by an individual {@link Mapper}.
- * <p>
- * Typically, it presents a byte-oriented view on the input and is the responsibility of {@link RecordReader} of the job to process this and present a record-oriented view.
- * 
- * @see InputFormat
- * @see RecordReader
- */
-public abstract class InputSplit implements Serializable {
-    private static final long serialVersionUID = 1L;
-
-    /**
-     * Get the size of the split, so that the input splits can be sorted by
-     * size.
-     * 
-     * @return the number of bytes in the split
-     * @throws IOException
-     * @throws InterruptedException
-     */
-    public abstract long getLength() throws IOException, InterruptedException;
-
-    /**
-     * Get the list of nodes by name where the data for the split would be
-     * local. The locations do not need to be serialized.
-     * 
-     * @return a new array of the node nodes.
-     * @throws IOException
-     * @throws InterruptedException
-     */
-    public abstract String[] getLocations() throws IOException, InterruptedException;
-}
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/main/resources/conf/topology-template.xml b/pregelix/pregelix-core/src/main/resources/conf/topology-template.xml
new file mode 100755
index 0000000..4710706
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/conf/topology-template.xml
@@ -0,0 +1,7 @@
+<cluster-topology>
+    <network-switch name="Global">
+        <network-switch name="local">
+            <terminal name="127.0.0.1"/>
+        </network-switch>
+    </network-switch>
+</cluster-topology>
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/getip.sh b/pregelix/pregelix-core/src/main/resources/scripts/getip.sh
index e0cdf73..a691c0f 100755
--- a/pregelix/pregelix-core/src/main/resources/scripts/getip.sh
+++ b/pregelix/pregelix-core/src/main/resources/scripts/getip.sh
@@ -6,6 +6,10 @@
 then
         #Get IP Address
         IPADDR=`/sbin/ifconfig eth0 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+	if [ "$IPADDR" = "" ]
+        then
+		IPADDR=`/sbin/ifconfig em1 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+        fi
 	if [ "$IPADDR" = "" ]
         then
 		IPADDR=`/sbin/ifconfig lo | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/pregelix b/pregelix/pregelix-core/src/main/resources/scripts/pregelix
index c3fd27b..b1a2f74 100644
--- a/pregelix/pregelix-core/src/main/resources/scripts/pregelix
+++ b/pregelix/pregelix-core/src/main/resources/scripts/pregelix
@@ -91,7 +91,7 @@
   REPO="$BASEDIR"/lib
 fi
 
-CLASSPATH=$CLASSPATH_PREFIX:"$HADOOP_HOME"/conf:"$BASEDIR"/etc:$(echo ${REPO}/*.jar | tr ' ' ':'):$1
+CLASSPATH=$CLASSPATH_PREFIX:"$HADOOP_HOME"/conf:/etc/hadoop/conf:"$BASEDIR"/etc:$1
 
 # For Cygwin, switch paths to Windows format before running java
 if $cygwin; then
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/startAllNCs.sh b/pregelix/pregelix-core/src/main/resources/scripts/startAllNCs.sh
index 629bd90..d30da26 100644
--- a/pregelix/pregelix-core/src/main/resources/scripts/startAllNCs.sh
+++ b/pregelix/pregelix-core/src/main/resources/scripts/startAllNCs.sh
@@ -2,5 +2,5 @@
 
 for i in `cat conf/slaves`
 do
-   ssh $i "cd ${PREGELIX_PATH}; bin/startnc.sh"
+   ssh $i "cd ${PREGELIX_PATH}; export JAVA_HOME=${JAVA_HOME}; bin/startnc.sh"
 done
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/startcc.sh b/pregelix/pregelix-core/src/main/resources/scripts/startcc.sh
index fe2551d..133b604 100644
--- a/pregelix/pregelix-core/src/main/resources/scripts/startcc.sh
+++ b/pregelix/pregelix-core/src/main/resources/scripts/startcc.sh
@@ -20,6 +20,12 @@
 export JAVA_HOME=$JAVA_HOME
 export JAVA_OPTS=$CCJAVA_OPTS
 
-#Launch hyracks cc script
+
 chmod -R 755 $HYRACKS_HOME
-$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 3 &> $CCLOGS_DIR/cc.log &
+if [ -f "conf/topology.xml"  ]; then
+#Launch hyracks cc script with topology
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 -cluster-topology "conf/topology.xml" &> $CCLOGS_DIR/cc.log &
+else
+#Launch hyracks cc script without topology
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 &> $CCLOGS_DIR/cc.log &
+fi
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/startnc.sh b/pregelix/pregelix-core/src/main/resources/scripts/startnc.sh
index 6e0f90e..b059aad 100644
--- a/pregelix/pregelix-core/src/main/resources/scripts/startnc.sh
+++ b/pregelix/pregelix-core/src/main/resources/scripts/startnc.sh
@@ -46,4 +46,4 @@
 cd $NCTMP_DIR
 
 #Launch hyracks nc
-$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR  -data-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS}" &> $NCLOGS_DIR/$NODEID.log &
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR  -data-ip-address $IPADDR -result-ip-address $IPADDR  -node-id $NODEID -iodevices "${IO_DIRS}" &> $NCLOGS_DIR/$NODEID.log &
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/stopnc.sh b/pregelix/pregelix-core/src/main/resources/scripts/stopnc.sh
index 03ce4e7..35c4794 100644
--- a/pregelix/pregelix-core/src/main/resources/scripts/stopnc.sh
+++ b/pregelix/pregelix-core/src/main/resources/scripts/stopnc.sh
@@ -5,6 +5,10 @@
 PID=`ps -ef|grep ${USER}|grep java|grep 'Dapp.name=hyracksnc'|awk '{print $2}'`
 
 if [ "$PID" == "" ]; then
+  PID=`ps -ef|grep ${USER}|grep java|grep 'hyracks'|awk '{print $2}'`
+fi
+
+if [ "$PID" == "" ]; then
   USERID=`id | sed 's/^uid=//;s/(.*$//'`
   PID=`ps -ef|grep ${USERID}|grep java|grep 'Dapp.name=hyracksnc'|awk '{print $2}'`
 fi
diff --git a/pregelix/pregelix-core/src/test/java/edu/uci/ics/pregelix/core/join/JoinTest.java b/pregelix/pregelix-core/src/test/java/edu/uci/ics/pregelix/core/join/JoinTest.java
index 4dfe57d..f7cadf6 100644
--- a/pregelix/pregelix-core/src/test/java/edu/uci/ics/pregelix/core/join/JoinTest.java
+++ b/pregelix/pregelix-core/src/test/java/edu/uci/ics/pregelix/core/join/JoinTest.java
@@ -62,7 +62,7 @@
 import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
 import edu.uci.ics.pregelix.core.util.PregelixHyracksIntegrationUtil;
 import edu.uci.ics.pregelix.core.util.TestUtils;
-import edu.uci.ics.pregelix.dataflow.std.FileWriteOperatorDescriptor;
+import edu.uci.ics.pregelix.dataflow.VertexWriteOperatorDescriptor;
 import edu.uci.ics.pregelix.dataflow.std.IndexNestedLoopJoinOperatorDescriptor;
 import edu.uci.ics.pregelix.dataflow.std.ProjectOperatorDescriptor;
 import edu.uci.ics.pregelix.runtime.bootstrap.StorageManagerInterface;
@@ -102,7 +102,7 @@
         ClusterConfig.setStorePath(PATH_TO_CLUSTER_STORE);
         ClusterConfig.setClusterPropertiesPath(PATH_TO_CLUSTER_PROPERTIES);
         cleanupStores();
-        PregelixHyracksIntegrationUtil.init();
+        PregelixHyracksIntegrationUtil.init("src/test/resources/topology.xml");
         PregelixHyracksIntegrationUtil.createApp(HYRACKS_APP_NAME);
 
         FileUtils.forceMkdir(new File(EXPECT_RESULT_DIR));
@@ -195,7 +195,7 @@
         FileSplit[] results = new FileSplit[1];
         results[0] = resultFile;
         IFileSplitProvider resultFileSplitProvider = new ConstantFileSplitProvider(results);
-        FileWriteOperatorDescriptor writer = new FileWriteOperatorDescriptor(spec, null, resultFileSplitProvider, null,
+        VertexWriteOperatorDescriptor writer = new VertexWriteOperatorDescriptor(spec, null, resultFileSplitProvider, null,
                 null);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer, new String[] { NC1_ID });
         PartitionConstraintHelper.addPartitionCountConstraint(spec, writer, 1);
@@ -362,7 +362,7 @@
         FileSplit[] results = new FileSplit[1];
         results[0] = resultFile;
         IFileSplitProvider resultFileSplitProvider = new ConstantFileSplitProvider(results);
-        FileWriteOperatorDescriptor writer = new FileWriteOperatorDescriptor(spec, null, resultFileSplitProvider, null,
+        VertexWriteOperatorDescriptor writer = new VertexWriteOperatorDescriptor(spec, null, resultFileSplitProvider, null,
                 null);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer, new String[] { NC1_ID });
         PartitionConstraintHelper.addPartitionCountConstraint(spec, writer, 1);
@@ -459,7 +459,7 @@
         FileSplit[] results = new FileSplit[1];
         results[0] = resultFile;
         IFileSplitProvider resultFileSplitProvider = new ConstantFileSplitProvider(results);
-        FileWriteOperatorDescriptor writer = new FileWriteOperatorDescriptor(spec, null, resultFileSplitProvider, null,
+        VertexWriteOperatorDescriptor writer = new VertexWriteOperatorDescriptor(spec, null, resultFileSplitProvider, null,
                 null);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer, new String[] { NC1_ID });
         PartitionConstraintHelper.addPartitionCountConstraint(spec, writer, 1);
@@ -566,7 +566,7 @@
         FileSplit[] results = new FileSplit[1];
         results[0] = resultFile;
         IFileSplitProvider resultFileSplitProvider = new ConstantFileSplitProvider(results);
-        FileWriteOperatorDescriptor writer = new FileWriteOperatorDescriptor(spec, null, resultFileSplitProvider, null,
+        VertexWriteOperatorDescriptor writer = new VertexWriteOperatorDescriptor(spec, null, resultFileSplitProvider, null,
                 null);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, writer, new String[] { NC1_ID });
         PartitionConstraintHelper.addPartitionCountConstraint(spec, writer, 1);
diff --git a/pregelix/pregelix-core/src/test/resources/hadoop/conf/mapred-site.xml b/pregelix/pregelix-core/src/test/resources/hadoop/conf/mapred-site.xml
index 1b9a4d6..f89dd79 100644
--- a/pregelix/pregelix-core/src/test/resources/hadoop/conf/mapred-site.xml
+++ b/pregelix/pregelix-core/src/test/resources/hadoop/conf/mapred-site.xml
@@ -18,8 +18,8 @@
       <value>20</value>
    </property>
    <property>
-      <name>mapred.min.split.size</name>
-      <value>65536</value>
+      <name>mapred.max.split.size</name>
+      <value>4096</value>
    </property>
 
 </configuration>
diff --git a/pregelix/pregelix-core/src/test/resources/topology.xml b/pregelix/pregelix-core/src/test/resources/topology.xml
new file mode 100755
index 0000000..4710706
--- /dev/null
+++ b/pregelix/pregelix-core/src/test/resources/topology.xml
@@ -0,0 +1,7 @@
+<cluster-topology>
+    <network-switch name="Global">
+        <network-switch name="local">
+            <terminal name="127.0.0.1"/>
+        </network-switch>
+    </network-switch>
+</cluster-topology>
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow-std-base/pom.xml b/pregelix/pregelix-dataflow-std-base/pom.xml
index 53d7c22..caa3222 100644
--- a/pregelix/pregelix-dataflow-std-base/pom.xml
+++ b/pregelix/pregelix-dataflow-std-base/pom.xml
@@ -22,8 +22,9 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
+					<fork>true</fork>
 				</configuration>
 			</plugin>
 			<plugin>
@@ -42,6 +43,7 @@
 			</plugin>
 			<plugin>
 				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
 				<configuration>
 					<filesets>
 						<fileset>
diff --git a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IUpdateFunction.java b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IUpdateFunction.java
index 62f92dd..a0d365f 100644
--- a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IUpdateFunction.java
+++ b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IUpdateFunction.java
@@ -16,17 +16,19 @@
 package edu.uci.ics.pregelix.dataflow.std.base;
 
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 public interface IUpdateFunction extends IFunction {
 
-    /**
-     * update the tuple pointed by tupleRef called after process,
-     * one-input-tuple-at-a-time
-     * 
-     * @param tupleRef
-     * @throws HyracksDataException
-     */
-    public void update(ITupleReference tupleRef) throws HyracksDataException;
+    /**
+     * Update the tuple referenced by tupleRef; called after process(),
+     * one input tuple at a time.
+     * 
+     * @param tupleRef
+     *            the tuple to update
+     * @param cloneUpdateTb
+     *            receives the new image of the tuple for batched index updates
+     * @throws HyracksDataException
+     */
+    public void update(ITupleReference tupleRef, ArrayTupleBuilder cloneUpdateTb) throws HyracksDataException;
 
 }
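
The new ArrayTupleBuilder parameter changes the update contract: rather than mutating the B-tree tuple in place, an implementation serializes the tuple's new image into cloneUpdateTb, and the calling operator applies the buffered images in batches (see the UpdateBuffer usage in the operator diffs below). A minimal sketch of a conforming implementation follows; the Writable 'updatedVertex' field and its meaning are assumptions for illustration, not code from this commit:

    import java.io.IOException;

    import org.apache.hadoop.io.Writable;

    import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
    import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
    import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;

    // Hypothetical sketch, not the actual Pregelix update function.
    public class CloneUpdateSketch {
        private Writable updatedVertex; // assumed to hold the post-compute vertex state, or null

        public void update(ITupleReference tupleRef, ArrayTupleBuilder cloneUpdateTb) throws HyracksDataException {
            if (updatedVertex == null) {
                return; // leaving the builder empty signals "no update" to the caller
            }
            // field 0: the vertex id, copied verbatim from the original B-tree tuple
            cloneUpdateTb.addField(tupleRef.getFieldData(0), tupleRef.getFieldStart(0), tupleRef.getFieldLength(0));
            // field 1: the new vertex image, re-serialized into the builder
            try {
                updatedVertex.write(cloneUpdateTb.getDataOutput());
            } catch (IOException e) {
                throw new HyracksDataException(e);
            }
            cloneUpdateTb.addFieldEndOffset();
        }
    }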
diff --git a/pregelix/pregelix-dataflow-std/pom.xml b/pregelix/pregelix-dataflow-std/pom.xml
index cc9a184..c5a0913 100644
--- a/pregelix/pregelix-dataflow-std/pom.xml
+++ b/pregelix/pregelix-dataflow-std/pom.xml
@@ -1,14 +1,15 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
 	<artifactId>pregelix-dataflow-std</artifactId>
 	<packaging>jar</packaging>
 	<name>pregelix-dataflow-std</name>
 
 	<parent>
-    		<groupId>edu.uci.ics.hyracks</groupId>
-    		<artifactId>pregelix</artifactId>
-    		<version>0.2.3-SNAPSHOT</version>
-  	</parent>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<artifactId>pregelix</artifactId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
 
 
 	<properties>
@@ -22,8 +23,9 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
+					<fork>true</fork>
 				</configuration>
 			</plugin>
 			<plugin>
@@ -42,6 +44,7 @@
 			</plugin>
 			<plugin>
 				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
 				<configuration>
 					<filesets>
 						<fileset>
@@ -100,11 +103,13 @@
 			<groupId>edu.uci.ics.hyracks</groupId>
 			<artifactId>hyracks-data-std</artifactId>
 			<version>0.2.3-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
 		</dependency>
 		<dependency>
-			<groupId>org.apache.hadoop</groupId>
-			<artifactId>hadoop-core</artifactId>
-			<version>0.20.2</version>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-hdfs-core</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
 			<type>jar</type>
 			<scope>compile</scope>
 		</dependency>
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/BTreeSearchFunctionUpdateOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/BTreeSearchFunctionUpdateOperatorNodePushable.java
index fb84aa0..3938613 100644
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/BTreeSearchFunctionUpdateOperatorNodePushable.java
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/BTreeSearchFunctionUpdateOperatorNodePushable.java
@@ -43,6 +43,7 @@
 import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
 import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
 import edu.uci.ics.pregelix.dataflow.util.FunctionProxy;
+import edu.uci.ics.pregelix.dataflow.util.UpdateBuffer;
 
 public class BTreeSearchFunctionUpdateOperatorNodePushable extends AbstractUnaryInputOperatorNodePushable {
     protected TreeIndexDataflowHelper treeIndexHelper;
@@ -70,6 +71,8 @@
 
     private final IFrameWriter[] writers;
     private final FunctionProxy functionProxy;
+    private ArrayTupleBuilder cloneUpdateTb;
+    private final UpdateBuffer updateBuffer;
 
     public BTreeSearchFunctionUpdateOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
             IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
@@ -94,6 +97,7 @@
         this.writers = new IFrameWriter[outputArity];
         this.functionProxy = new FunctionProxy(ctx, functionFactory, preHookFactory, postHookFactory, inputRdFactory,
                 writers);
+        this.updateBuffer = new UpdateBuffer(ctx, 2);
     }
 
     @Override
@@ -122,6 +126,9 @@
             appender = new FrameTupleAppender(treeIndexHelper.getHyracksTaskContext().getFrameSize());
             appender.reset(writeBuffer, true);
             indexAccessor = btree.createAccessor();
+
+            cloneUpdateTb = new ArrayTupleBuilder(btree.getFieldCount());
+            updateBuffer.setFieldCount(btree.getFieldCount());
         } catch (Exception e) {
             treeIndexHelper.deinit();
             throw new HyracksDataException(e);
@@ -136,7 +143,24 @@
         while (cursor.hasNext()) {
             cursor.next();
             ITupleReference tuple = cursor.getTuple();
-            functionProxy.functionCall(tuple);
+            functionProxy.functionCall(tuple, cloneUpdateTb);
+
+            //apply the clone update, if any
+            if (cloneUpdateTb.getSize() > 0) {
+                if (!updateBuffer.appendTuple(cloneUpdateTb)) {
+                    //release the cursor/latch
+                    cursor.close();
+                    //batch update
+                    updateBuffer.updateBTree(indexAccessor);
+
+                    //search again
+                    cursor.reset();
+                    rangePred.setLowKey(tuple, true);
+                    rangePred.setHighKey(highKey, highKeyInclusive);
+                    indexAccessor.search(cursor, rangePred);
+                }
+            }
+            cloneUpdateTb.reset();
         }
     }
 
@@ -168,6 +192,8 @@
         try {
             try {
                 cursor.close();
+                //batch update
+                updateBuffer.updateBTree(indexAccessor);
             } catch (Exception e) {
                 throw new HyracksDataException(e);
             }
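
The loop above follows a cursor-release protocol: an open range cursor pins B-tree pages, so the buffered updates can only be applied once cursor.close() has run. When the UpdateBuffer fills mid-scan, the operator closes the cursor, flushes the batch, and re-opens the search inclusively at the last key it visited; any leftover batch is flushed again in close(). The same protocol, extracted into one hypothetical helper (all fields are as declared in the operator above):

    // Hypothetical helper capturing the pattern used in writeSearchResults().
    private void flushAndReseek(ITupleReference lastTuple) throws Exception {
        // An open cursor holds page latches; release them before mutating the tree.
        cursor.close();
        updateBuffer.updateBTree(indexAccessor);
        // Resume the range scan from the last processed key (inclusive).
        cursor.reset();
        rangePred.setLowKey(lastTuple, true);
        rangePred.setHighKey(highKey, highKeyInclusive);
        indexAccessor.search(cursor, rangePred);
    }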
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/FileWriteOperatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/FileWriteOperatorDescriptor.java
deleted file mode 100644
index 356f06c..0000000
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/FileWriteOperatorDescriptor.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.pregelix.dataflow.std;
-
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.PrintWriter;
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.util.StringSerializationUtils;
-import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
-import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
-
-public class FileWriteOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-    private static final long serialVersionUID = 1L;
-    private final FileSplit[] splits;
-    private final IRuntimeHookFactory preHookFactory;
-    private final IRuntimeHookFactory postHookFactory;
-    private final IRecordDescriptorFactory inputRdFactory;
-
-    public FileWriteOperatorDescriptor(JobSpecification spec, IRecordDescriptorFactory inputRdFactory,
-            IFileSplitProvider fileSplitProvider, IRuntimeHookFactory preHookFactory,
-            IRuntimeHookFactory postHookFactory) {
-        super(spec, 1, 0);
-        this.splits = fileSplitProvider.getFileSplits();
-        this.preHookFactory = preHookFactory;
-        this.postHookFactory = postHookFactory;
-        this.inputRdFactory = inputRdFactory;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
-            final IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
-            throws HyracksDataException {
-        IOperatorNodePushable op = new AbstractUnaryInputSinkOperatorNodePushable() {
-            private RecordDescriptor rd0;
-            private FrameDeserializer frameDeserializer;
-            private PrintWriter outputWriter;
-            private final static String separator = "|";
-
-            @Override
-            public void open() throws HyracksDataException {
-                rd0 = inputRdFactory == null ? recordDescProvider.getInputRecordDescriptor(getActivityId(), 0)
-                        : inputRdFactory.createRecordDescriptor();
-                frameDeserializer = new FrameDeserializer(ctx.getFrameSize(), rd0);
-                try {
-                    outputWriter = new PrintWriter(new OutputStreamWriter(new FileOutputStream(splits[partition]
-                            .getLocalFile().getFile())));
-                    if (preHookFactory != null)
-                        preHookFactory.createRuntimeHook().configure(ctx);
-                } catch (IOException e) {
-                    throw new HyracksDataException(e);
-                }
-            }
-
-            @Override
-            public void nextFrame(ByteBuffer frame) throws HyracksDataException {
-                frameDeserializer.reset(frame);
-                while (!frameDeserializer.done()) {
-                    Object[] tuple = frameDeserializer.deserializeRecord();
-                    for (int i = 0; i < tuple.length - 1; i++) {
-                        outputWriter.print(StringSerializationUtils.toString(tuple[i]));
-                        outputWriter.print(separator);
-                    }
-                    outputWriter.print(StringSerializationUtils.toString(tuple[tuple.length - 1]));
-                    outputWriter.println();
-                }
-            }
-
-            @Override
-            public void fail() throws HyracksDataException {
-
-            }
-
-            @Override
-            public void close() throws HyracksDataException {
-                if (postHookFactory != null)
-                    postHookFactory.createRuntimeHook().configure(ctx);
-                outputWriter.close();
-            }
-
-        };
-        return op;
-    }
-}
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/FunctionCallOperatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/FunctionCallOperatorDescriptor.java
index ee3ac82..4cbd6c4 100644
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/FunctionCallOperatorDescriptor.java
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/FunctionCallOperatorDescriptor.java
@@ -61,22 +61,21 @@
             private FrameDeserializer frameDeserializer;
             private final IFrameWriter[] writers = new IFrameWriter[outputArity];
             private final IFunction function = functionFactory.createFunction();
+            private ClassLoader ctxCL = Thread.currentThread().getContextClassLoader();
 
             @Override
-            public void close() throws HyracksDataException {
-                if (postHookFactory != null)
-                    postHookFactory.createRuntimeHook().configure(ctx);
-                function.close();
+            public void open() throws HyracksDataException {
+                rd0 = inputRdFactory == null ? recordDescProvider.getInputRecordDescriptor(getActivityId(), 0)
+                        : inputRdFactory.createRecordDescriptor();
+                frameDeserializer = new FrameDeserializer(ctx.getFrameSize(), rd0);
+                ctxCL = Thread.currentThread().getContextClassLoader();
+                Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
                 for (IFrameWriter writer : writers) {
-                    writer.close();
+                    writer.open();
                 }
-            }
-
-            @Override
-            public void fail() throws HyracksDataException {
-                for (IFrameWriter writer : writers) {
-                    writer.fail();
-                }
+                if (preHookFactory != null)
+                    preHookFactory.createRuntimeHook().configure(ctx);
+                function.open(ctx, rd0, writers);
             }
 
             @Override
@@ -89,17 +88,21 @@
             }
 
             @Override
-            public void open() throws HyracksDataException {
-                rd0 = inputRdFactory == null ? recordDescProvider.getInputRecordDescriptor(getActivityId(), 0)
-                        : inputRdFactory.createRecordDescriptor();
-                frameDeserializer = new FrameDeserializer(ctx.getFrameSize(), rd0);
-                Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
+            public void close() throws HyracksDataException {
+                if (postHookFactory != null)
+                    postHookFactory.createRuntimeHook().configure(ctx);
+                function.close();
                 for (IFrameWriter writer : writers) {
-                    writer.open();
+                    writer.close();
                 }
-                if (preHookFactory != null)
-                    preHookFactory.createRuntimeHook().configure(ctx);
-                function.open(ctx, rd0, writers);
+                Thread.currentThread().setContextClassLoader(ctxCL);
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
+                for (IFrameWriter writer : writers) {
+                    writer.fail();
+                }
             }
 
             @Override
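
The reordered open()/close() pair above also implements a context-classloader save/restore: open() records the caller's context classloader in ctxCL and installs the operator's own before the user function runs, and close() restores the original. In its canonical single-method form the pattern reads as follows (a generic sketch; the operator spreads the two halves across open() and close()):

    ClassLoader saved = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
    try {
        // run user code that resolves classes through the context classloader
    } finally {
        Thread.currentThread().setContextClassLoader(saved);
    }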
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinFunctionUpdateOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinFunctionUpdateOperatorNodePushable.java
index 75a8087..37029f3 100644
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinFunctionUpdateOperatorNodePushable.java
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinFunctionUpdateOperatorNodePushable.java
@@ -14,7 +14,6 @@
  */
 package edu.uci.ics.pregelix.dataflow.std;
 
-import java.io.DataOutput;
 import java.nio.ByteBuffer;
 
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
@@ -44,6 +43,7 @@
 import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
 import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
 import edu.uci.ics.pregelix.dataflow.util.FunctionProxy;
+import edu.uci.ics.pregelix.dataflow.util.UpdateBuffer;
 
 public class IndexNestedLoopJoinFunctionUpdateOperatorNodePushable extends AbstractUnaryInputOperatorNodePushable {
     private TreeIndexDataflowHelper treeIndexOpHelper;
@@ -51,9 +51,6 @@
 
     private ByteBuffer writeBuffer;
     private FrameTupleAppender appender;
-    private ArrayTupleBuilder tb;
-    private DataOutput dos;
-
     private BTree btree;
     private PermutingFrameTupleReference lowKey;
     private PermutingFrameTupleReference highKey;
@@ -67,17 +64,16 @@
     protected ITreeIndexAccessor indexAccessor;
 
     private RecordDescriptor recDesc;
-    private final RecordDescriptor inputRecDesc;
-
     private final IFrameWriter[] writers;
     private final FunctionProxy functionProxy;
+    private ArrayTupleBuilder cloneUpdateTb;
+    private final UpdateBuffer updateBuffer;
 
     public IndexNestedLoopJoinFunctionUpdateOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
             IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
             int[] lowKeyFields, int[] highKeyFields, boolean lowKeyInclusive, boolean highKeyInclusive,
             IUpdateFunctionFactory functionFactory, IRuntimeHookFactory preHookFactory,
             IRuntimeHookFactory postHookFactory, IRecordDescriptorFactory inputRdFactory, int outputArity) {
-        inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
         treeIndexOpHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
                 opDesc, ctx, partition);
         this.lowKeyInclusive = lowKeyInclusive;
@@ -95,6 +91,7 @@
         this.writers = new IFrameWriter[outputArity];
         this.functionProxy = new FunctionProxy(ctx, functionFactory, preHookFactory, postHookFactory, inputRdFactory,
                 writers);
+        this.updateBuffer = new UpdateBuffer(ctx, 2);
     }
 
     protected void setCursor() {
@@ -144,12 +141,12 @@
             rangePred = new RangePredicate(null, null, lowKeyInclusive, highKeyInclusive, lowKeySearchCmp,
                     highKeySearchCmp);
             writeBuffer = treeIndexOpHelper.getHyracksTaskContext().allocateFrame();
-            tb = new ArrayTupleBuilder(inputRecDesc.getFields().length + btree.getFieldCount());
-            dos = tb.getDataOutput();
             appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksTaskContext().getFrameSize());
             appender.reset(writeBuffer, true);
 
             indexAccessor = btree.createAccessor();
+            cloneUpdateTb = new ArrayTupleBuilder(btree.getFieldCount());
+            updateBuffer.setFieldCount(btree.getFieldCount());
         } catch (Exception e) {
             treeIndexOpHelper.deinit();
             throw new HyracksDataException(e);
@@ -158,27 +155,29 @@
 
     private void writeSearchResults(IFrameTupleAccessor leftAccessor, int tIndex) throws Exception {
         while (cursor.hasNext()) {
-            tb.reset();
             cursor.next();
-
             ITupleReference tupleRef = cursor.getTuple();
-            for (int i = 0; i < inputRecDesc.getFields().length; i++) {
-                int tupleStart = leftAccessor.getTupleStartOffset(tIndex);
-                int fieldStart = leftAccessor.getFieldStartOffset(tIndex, i);
-                int offset = leftAccessor.getFieldSlotsLength() + tupleStart + fieldStart;
-                int len = leftAccessor.getFieldEndOffset(tIndex, i) - fieldStart;
-                dos.write(leftAccessor.getBuffer().array(), offset, len);
-                tb.addFieldEndOffset();
-            }
-            for (int i = 0; i < tupleRef.getFieldCount(); i++) {
-                dos.write(tupleRef.getFieldData(i), tupleRef.getFieldStart(i), tupleRef.getFieldLength(i));
-                tb.addFieldEndOffset();
-            }
 
             /**
              * call the update function
              */
-            functionProxy.functionCall(tb, tupleRef);
+            functionProxy.functionCall(leftAccessor, tIndex, tupleRef, cloneUpdateTb);
+
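+            //if the function produced a cloned update, buffer it; when the buffer
+            //is full, release the cursor, flush the updates as a batch, and resume
+            //the scan from the key of the current tuple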
+            if (cloneUpdateTb.getSize() > 0) {
+                if (!updateBuffer.appendTuple(cloneUpdateTb)) {
+                    //release the cursor/latch
+                    cursor.close();
+                    //batch update
+                    updateBuffer.updateBTree(indexAccessor);
+
+                    //search again
+                    cursor.reset();
+                    rangePred.setLowKey(tupleRef, true);
+                    rangePred.setHighKey(highKey, highKeyInclusive);
+                    indexAccessor.search(cursor, rangePred);
+                }
+            }
+            cloneUpdateTb.reset();
         }
     }
 
@@ -210,6 +209,8 @@
         try {
             try {
                 cursor.close();
+                //batch update
+                updateBuffer.updateBTree(indexAccessor);
             } catch (Exception e) {
                 throw new HyracksDataException(e);
             }
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable.java
index c31ebd4..f7b3d62 100644
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable.java
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable.java
@@ -45,6 +45,7 @@
 import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
 import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
 import edu.uci.ics.pregelix.dataflow.util.FunctionProxy;
+import edu.uci.ics.pregelix.dataflow.util.UpdateBuffer;
 
 public class IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable extends
         AbstractUnaryInputOperatorNodePushable {
@@ -53,7 +54,7 @@
 
     private ByteBuffer writeBuffer;
     private FrameTupleAppender appender;
-    private ArrayTupleBuilder tb;
+    private ArrayTupleBuilder nullTupleBuilder;
     private DataOutput dos;
 
     private BTree btree;
@@ -76,6 +77,8 @@
 
     private final IFrameWriter[] writers;
     private final FunctionProxy functionProxy;
+    private ArrayTupleBuilder cloneUpdateTb;
+    private UpdateBuffer updateBuffer;
 
     public IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
             IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
@@ -100,6 +103,7 @@
         this.writers = new IFrameWriter[outputArity];
         this.functionProxy = new FunctionProxy(ctx, functionFactory, preHookFactory, postHookFactory, inputRdFactory,
                 writers);
+        this.updateBuffer = new UpdateBuffer(ctx, 2);
     }
 
     protected void setCursor() {
@@ -144,8 +148,15 @@
             rangePred = new RangePredicate(null, null, true, true, lowKeySearchCmp, highKeySearchCmp);
 
             writeBuffer = treeIndexOpHelper.getHyracksTaskContext().allocateFrame();
-            tb = new ArrayTupleBuilder(inputRecDesc.getFields().length + btree.getFieldCount());
-            dos = tb.getDataOutput();
+
+            nullTupleBuilder = new ArrayTupleBuilder(inputRecDesc.getFields().length);
+            dos = nullTupleBuilder.getDataOutput();
+            nullTupleBuilder.reset();
+            for (int i = 0; i < inputRecDesc.getFields().length; i++) {
+                nullWriter[i].writeNull(dos);
+                nullTupleBuilder.addFieldEndOffset();
+            }
+
             appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksTaskContext().getFrameSize());
             appender.reset(writeBuffer, true);
 
@@ -164,32 +175,38 @@
                 match = false;
             }
 
+            cloneUpdateTb = new ArrayTupleBuilder(btree.getFieldCount());
+            updateBuffer.setFieldCount(btree.getFieldCount());
         } catch (Exception e) {
             treeIndexOpHelper.deinit();
             throw new HyracksDataException(e);
         }
     }
 
+    //for the join match cases
     private void writeResults(IFrameTupleAccessor leftAccessor, int tIndex, ITupleReference frameTuple)
             throws Exception {
-        tb.reset();
-        for (int i = 0; i < inputRecDesc.getFields().length; i++) {
-            int tupleStart = leftAccessor.getTupleStartOffset(tIndex);
-            int fieldStart = leftAccessor.getFieldStartOffset(tIndex, i);
-            int offset = leftAccessor.getFieldSlotsLength() + tupleStart + fieldStart;
-            int len = leftAccessor.getFieldEndOffset(tIndex, i) - fieldStart;
-            dos.write(leftAccessor.getBuffer().array(), offset, len);
-            tb.addFieldEndOffset();
-        }
-        for (int i = 0; i < frameTuple.getFieldCount(); i++) {
-            dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
-            tb.addFieldEndOffset();
-        }
-
         /**
          * function call
          */
-        functionProxy.functionCall(tb, frameTuple);
+        functionProxy.functionCall(leftAccessor, tIndex, frameTuple, cloneUpdateTb);
+
+        //doing clone update
+        if (cloneUpdateTb.getSize() > 0) {
+            if (!updateBuffer.appendTuple(cloneUpdateTb)) {
+                //release the cursor/latch
+                cursor.close();
+                //batch update
+                updateBuffer.updateBTree(indexAccessor);
+
+                //search again and recover the cursor
+                cursor.reset();
+                rangePred.setLowKey(frameTuple, true);
+                rangePred.setHighKey(null, true);
+                indexAccessor.search(cursor, rangePred);
+            }
+            cloneUpdateTb.reset();
+        }
     }
 
     @Override
@@ -243,6 +260,8 @@
             }
             try {
                 cursor.close();
+                //batch update
+                updateBuffer.updateBTree(indexAccessor);
             } catch (Exception e) {
                 throw new HyracksDataException(e);
             }
@@ -271,20 +290,27 @@
 
     /** write result for outer case */
     private void writeResults(ITupleReference frameTuple) throws Exception {
-        tb.reset();
-        for (int i = 0; i < inputRecDesc.getFields().length; i++) {
-            nullWriter[i].writeNull(dos);
-            tb.addFieldEndOffset();
-        }
-        for (int i = 0; i < frameTuple.getFieldCount(); i++) {
-            dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
-            tb.addFieldEndOffset();
-        }
-
         /**
          * function call
          */
-        functionProxy.functionCall(tb, frameTuple);
+        functionProxy.functionCall(nullTupleBuilder, frameTuple, cloneUpdateTb);
+
+        //doing clone update
+        if (cloneUpdateTb.getSize() > 0) {
+            if (!updateBuffer.appendTuple(cloneUpdateTb)) {
+                //release the cursor/latch
+                cursor.close();
+                //batch update
+                updateBuffer.updateBTree(indexAccessor);
+
+                //search again and recover the cursor
+                cursor.reset();
+                rangePred.setLowKey(frameTuple, true);
+                rangePred.setHighKey(null, true);
+                indexAccessor.search(cursor, rangePred);
+            }
+            cloneUpdateTb.reset();
+        }
     }
 
     @Override
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable.java
index 0a966b5..6af60a8 100644
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable.java
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable.java
@@ -14,7 +14,6 @@
  */
 package edu.uci.ics.pregelix.dataflow.std;
 
-import java.io.DataOutput;
 import java.nio.ByteBuffer;
 
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
@@ -44,6 +43,7 @@
 import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
 import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
 import edu.uci.ics.pregelix.dataflow.util.FunctionProxy;
+import edu.uci.ics.pregelix.dataflow.util.UpdateBuffer;
 
 public class IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable extends AbstractUnaryInputOperatorNodePushable {
     private TreeIndexDataflowHelper treeIndexOpHelper;
@@ -51,8 +51,6 @@
 
     private ByteBuffer writeBuffer;
     private FrameTupleAppender appender;
-    private ArrayTupleBuilder tb;
-    private DataOutput dos;
 
     private BTree btree;
     private boolean isForward;
@@ -63,8 +61,6 @@
     protected ITreeIndexAccessor indexAccessor;
 
     private RecordDescriptor recDesc;
-    private final RecordDescriptor inputRecDesc;
-
     private PermutingFrameTupleReference lowKey;
     private PermutingFrameTupleReference highKey;
 
@@ -73,13 +69,14 @@
 
     private final IFrameWriter[] writers;
     private final FunctionProxy functionProxy;
+    private ArrayTupleBuilder cloneUpdateTb;
+    private UpdateBuffer updateBuffer;
 
     public IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
             IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
             int[] lowKeyFields, int[] highKeyFields, IUpdateFunctionFactory functionFactory,
             IRuntimeHookFactory preHookFactory, IRuntimeHookFactory postHookFactory,
             IRecordDescriptorFactory inputRdFactory, int outputArity) {
-        inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
         treeIndexOpHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
                 opDesc, ctx, partition);
         this.isForward = isForward;
@@ -97,6 +94,7 @@
         this.writers = new IFrameWriter[outputArity];
         this.functionProxy = new FunctionProxy(ctx, functionFactory, preHookFactory, postHookFactory, inputRdFactory,
                 writers);
+        this.updateBuffer = new UpdateBuffer(ctx, 2);
     }
 
     protected void setCursor() {
@@ -123,8 +121,6 @@
             lowKeySearchCmp = new MultiComparator(lowKeySearchComparators);
 
             writeBuffer = treeIndexOpHelper.getHyracksTaskContext().allocateFrame();
-            tb = new ArrayTupleBuilder(btree.getFieldCount());
-            dos = tb.getDataOutput();
             appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksTaskContext().getFrameSize());
             appender.reset(writeBuffer, true);
 
@@ -142,7 +138,8 @@
                 currentTopTuple = cursor.getTuple();
                 match = false;
             }
-
+            cloneUpdateTb = new ArrayTupleBuilder(btree.getFieldCount());
+            updateBuffer.setFieldCount(btree.getFieldCount());
         } catch (Exception e) {
             treeIndexOpHelper.deinit();
             throw new HyracksDataException(e);
@@ -207,6 +204,9 @@
             }
             try {
                 cursor.close();
+
+                //batch update
+                updateBuffer.updateBTree(indexAccessor);
             } catch (Exception e) {
                 throw new HyracksDataException(e);
             }
@@ -231,29 +231,47 @@
 
     /** write the right result */
     private void writeRightResults(ITupleReference frameTuple) throws Exception {
-        tb.reset();
-        for (int i = 0; i < frameTuple.getFieldCount(); i++) {
-            dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
-            tb.addFieldEndOffset();
-        }
+        functionProxy.functionCall(frameTuple, cloneUpdateTb);
 
-        functionProxy.functionCall(tb, frameTuple);
+        //doing clone update
+        if (cloneUpdateTb.getSize() > 0) {
+            if (!updateBuffer.appendTuple(cloneUpdateTb)) {
+                //release the cursor/latch
+                cursor.close();
+                //batch update
+                updateBuffer.updateBTree(indexAccessor);
+
+                //search again
+                cursor.reset();
+                rangePred.setLowKey(frameTuple, true);
+                rangePred.setHighKey(null, true);
+                indexAccessor.search(cursor, rangePred);
+            }
+            cloneUpdateTb.reset();
+        }
     }
 
     /** write the left result */
     private void writeLeftResults(IFrameTupleAccessor leftAccessor, int tIndex, ITupleReference frameTuple)
             throws Exception {
-        tb.reset();
-        for (int i = 0; i < inputRecDesc.getFields().length; i++) {
-            int tupleStart = leftAccessor.getTupleStartOffset(tIndex);
-            int fieldStart = leftAccessor.getFieldStartOffset(tIndex, i);
-            int offset = leftAccessor.getFieldSlotsLength() + tupleStart + fieldStart;
-            int len = leftAccessor.getFieldEndOffset(tIndex, i) - fieldStart;
-            dos.write(leftAccessor.getBuffer().array(), offset, len);
-            tb.addFieldEndOffset();
-        }
+        functionProxy.functionCall(leftAccessor, tIndex, frameTuple, cloneUpdateTb);
 
-        functionProxy.functionCall(tb, frameTuple);
+        //doing clone update
+        if (cloneUpdateTb.getSize() > 0) {
+            if (!updateBuffer.appendTuple(cloneUpdateTb)) {
+                //release the cursor/latch
+                cursor.close();
+                //batch update
+                updateBuffer.updateBTree(indexAccessor);
+
+                //search again
+                cursor.reset();
+                rangePred.setLowKey(frameTuple, true);
+                rangePred.setHighKey(null, true);
+                indexAccessor.search(cursor, rangePred);
+            }
+            cloneUpdateTb.reset();
+        }
     }
 
     @Override
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/FunctionProxy.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/FunctionProxy.java
index 4b0f4a5..99bca1a 100644
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/FunctionProxy.java
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/FunctionProxy.java
@@ -15,6 +15,7 @@
 
 package edu.uci.ics.pregelix.dataflow.util;
 
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
 import edu.uci.ics.hyracks.api.comm.IFrameWriter;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
@@ -36,6 +37,7 @@
     private final IFrameWriter[] writers;
     private TupleDeserializer tupleDe;
     private RecordDescriptor inputRd;
+    private ClassLoader ctxCL;
 
     public FunctionProxy(IHyracksTaskContext ctx, IUpdateFunctionFactory functionFactory,
             IRuntimeHookFactory preHookFactory, IRuntimeHookFactory postHookFactory,
@@ -56,6 +58,7 @@
     public void functionOpen() throws HyracksDataException {
         inputRd = inputRdFactory.createRecordDescriptor();
         tupleDe = new TupleDeserializer(inputRd);
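+        //save the caller's context class loader so functionClose() can restore it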
+        ctxCL = Thread.currentThread().getContextClassLoader();
         Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
         for (IFrameWriter writer : writers) {
             writer.open();
@@ -68,16 +71,19 @@
     /**
      * Call the function
      * 
-     * @param tb
-     *            input data
+     * @param leftAccessor
+     *            input page accessor
+     * @param leftTupleIndex
+     *            the tuple index in the page
-     * @param updateRef
-     *            update pointer
+     * @param right
+     *            the tuple reference on the index (right) side
+     * @param cloneUpdateTb
+     *            the builder that receives the cloned update tuple, if any
      * @throws HyracksDataException
      */
-    public void functionCall(ArrayTupleBuilder tb, ITupleReference updateRef) throws HyracksDataException {
-        Object[] tuple = tupleDe.deserializeRecord(tb);
+    public void functionCall(IFrameTupleAccessor leftAccessor, int leftTupleIndex, ITupleReference right,
+            ArrayTupleBuilder cloneUpdateTb) throws HyracksDataException {
+        Object[] tuple = tupleDe.deserializeRecord(leftAccessor, leftTupleIndex, right);
         function.process(tuple);
-        function.update(updateRef);
+        function.update(right, cloneUpdateTb);
     }
 
     /**
@@ -86,10 +92,26 @@
      * @param updateRef
+     * @param cloneUpdateTb
+     *            the builder that receives the cloned update tuple, if any
      * @throws HyracksDataException
      */
-    public void functionCall(ITupleReference updateRef) throws HyracksDataException {
+    public void functionCall(ITupleReference updateRef, ArrayTupleBuilder cloneUpdateTb) throws HyracksDataException {
         Object[] tuple = tupleDe.deserializeRecord(updateRef);
         function.process(tuple);
-        function.update(updateRef);
+        function.update(updateRef, cloneUpdateTb);
+    }
+
+    /**
+     * Call the function
+     * 
+     * @param tb
+     *            input data
+     * @param inPlaceUpdateRef
+     *            update pointer
+     * @throws HyracksDataException
+     */
+    public void functionCall(ArrayTupleBuilder tb, ITupleReference inPlaceUpdateRef, ArrayTupleBuilder cloneUpdateTb)
+            throws HyracksDataException {
+        Object[] tuple = tupleDe.deserializeRecord(tb, inPlaceUpdateRef);
+        function.process(tuple);
+        function.update(inPlaceUpdateRef, cloneUpdateTb);
     }
 
     /**
@@ -104,5 +126,6 @@
         for (IFrameWriter writer : writers) {
             writer.close();
         }
+        Thread.currentThread().setContextClassLoader(ctxCL);
     }
 }
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/TupleDeserializer.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/TupleDeserializer.java
index 5ae1d81..4fe83db 100644
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/TupleDeserializer.java
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/TupleDeserializer.java
@@ -20,6 +20,7 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -42,7 +43,7 @@
     }
 
     public Object[] deserializeRecord(ITupleReference tupleRef) throws HyracksDataException {
-        for (int i = 0; i < record.length; ++i) {
+        for (int i = 0; i < tupleRef.getFieldCount(); ++i) {
             byte[] data = tupleRef.getFieldData(i);
             int offset = tupleRef.getFieldStart(i);
             bbis.setByteArray(data, offset);
@@ -65,11 +66,65 @@
         return record;
     }
 
-    public Object[] deserializeRecord(ArrayTupleBuilder tb) throws HyracksDataException {
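+    /**
+     * Deserialize a record whose leading fields come from a tuple in a frame
+     * (the probe side) and whose trailing fields come from an index tuple
+     * reference (the build side).
+     */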
+    public Object[] deserializeRecord(IFrameTupleAccessor left, int tIndex, ITupleReference right)
+            throws HyracksDataException {
+        byte[] data = left.getBuffer().array();
+        int tStart = left.getTupleStartOffset(tIndex) + left.getFieldSlotsLength();
+        int leftFieldCount = left.getFieldCount();
+        int fStart = tStart;
+        for (int i = 0; i < leftFieldCount; ++i) {
+            /**
+             * reset the input
+             */
+            fStart = tStart + left.getFieldStartOffset(tIndex, i);
+            bbis.setByteArray(data, fStart);
+
+            /**
+             * do deserialization
+             */
+            Object instance = recordDescriptor.getFields()[i].deserialize(di);
+            if (LOGGER.isLoggable(Level.FINEST)) {
+                LOGGER.finest(i + " " + instance);
+            }
+            record[i] = instance;
+            if (FrameConstants.DEBUG_FRAME_IO) {
+                try {
+                    if (di.readInt() != FrameConstants.FRAME_FIELD_MAGIC) {
+                        throw new HyracksDataException("Field magic mismatch");
+                    }
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+        for (int i = leftFieldCount; i < record.length; ++i) {
+            byte[] rightData = right.getFieldData(i - leftFieldCount);
+            int rightOffset = right.getFieldStart(i - leftFieldCount);
+            bbis.setByteArray(rightData, rightOffset);
+
+            Object instance = recordDescriptor.getFields()[i].deserialize(di);
+            if (LOGGER.isLoggable(Level.FINEST)) {
+                LOGGER.finest(i + " " + instance);
+            }
+            record[i] = instance;
+            if (FrameConstants.DEBUG_FRAME_IO) {
+                try {
+                    if (di.readInt() != FrameConstants.FRAME_FIELD_MAGIC) {
+                        throw new HyracksDataException("Field magic mismatch");
+                    }
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+        return record;
+    }
+
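+    /**
+     * Deserialize a record whose leading fields come from a pre-built tuple
+     * builder and whose trailing fields come from an index tuple reference.
+     */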
+    public Object[] deserializeRecord(ArrayTupleBuilder tb, ITupleReference right) throws HyracksDataException {
         byte[] data = tb.getByteArray();
         int[] offset = tb.getFieldEndOffsets();
         int start = 0;
-        for (int i = 0; i < record.length; ++i) {
+        for (int i = 0; i < offset.length; ++i) {
             /**
              * reset the input
              */
@@ -94,6 +149,26 @@
                 }
             }
         }
+        for (int i = offset.length; i < record.length; ++i) {
+            byte[] rightData = right.getFieldData(i - offset.length);
+            int rightOffset = right.getFieldStart(i - offset.length);
+            bbis.setByteArray(rightData, rightOffset);
+
+            Object instance = recordDescriptor.getFields()[i].deserialize(di);
+            if (LOGGER.isLoggable(Level.FINEST)) {
+                LOGGER.finest(i + " " + instance);
+            }
+            record[i] = instance;
+            if (FrameConstants.DEBUG_FRAME_IO) {
+                try {
+                    if (di.readInt() != FrameConstants.FRAME_FIELD_MAGIC) {
+                        throw new HyracksDataException("Field magic mismatch");
+                    }
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
         return record;
     }
 }
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/UpdateBuffer.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/UpdateBuffer.java
new file mode 100644
index 0000000..9a30647
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/UpdateBuffer.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package edu.uci.ics.pregelix.dataflow.util;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
+
+/**
+ * The buffer to hold updates.
+ * We batch updates to the B-tree during index searches and joins so that
+ * cursors do not have to be opened and closed frequently.
+ */
+public class UpdateBuffer {
+
+    private int currentInUse = 0;
+    private final int pageLimit;
+    private final List<ByteBuffer> buffers = new ArrayList<ByteBuffer>();
+    private final FrameTupleAppender appender;
+    private final IHyracksTaskContext ctx;
+    private final FrameTupleReference tuple = new FrameTupleReference();
+    private final int frameSize;
+    private IFrameTupleAccessor fta;
+
+    public UpdateBuffer(int numPages, IHyracksTaskContext ctx, int fieldCount) {
+        this.appender = new FrameTupleAppender(ctx.getFrameSize());
+        ByteBuffer buffer = ctx.allocateFrame();
+        this.buffers.add(buffer);
+        this.appender.reset(buffer, true);
+        this.pageLimit = numPages;
+        this.ctx = ctx;
+        this.frameSize = ctx.getFrameSize();
+        this.fta = new UpdateBufferTupleAccessor(frameSize, fieldCount);
+    }
+
+    public UpdateBuffer(IHyracksTaskContext ctx, int fieldCount) {
+        //by default, the update buffer has 1000 pages
+        this(1000, ctx, fieldCount);
+    }
+
+    public void setFieldCount(int fieldCount) {
+        if (fta.getFieldCount() != fieldCount) {
+            this.fta = new UpdateBufferTupleAccessor(frameSize, fieldCount);
+        }
+    }
+
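+    /**
+     * Append one update tuple to the buffer.
+     * 
+     * @return true if the tuple was buffered, possibly after moving to a new
+     *         page; false if the page limit is reached and the buffer must be
+     *         flushed via updateBTree(...) before the tuple can be buffered.
+     */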
+    public boolean appendTuple(ArrayTupleBuilder tb) throws HyracksDataException {
+        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+            if (currentInUse + 1 < pageLimit) {
+                // move to the new buffer
+                currentInUse++;
+                allocate(currentInUse);
+                ByteBuffer buffer = buffers.get(currentInUse);
+                appender.reset(buffer, true);
+
+                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                    throw new HyracksDataException("the tuple is too large to fit into an empty frame!");
+                }
+                return true;
+            } else {
+                return false;
+            }
+        } else {
+            return true;
+        }
+    }
+
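+    /**
+     * Flush all buffered tuples as in-place updates through the given index
+     * accessor, then reset the buffer for reuse.
+     */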
+    public void updateBTree(ITreeIndexAccessor bta) throws HyracksDataException, IndexException {
+        // batch update
+        for (int i = 0; i <= currentInUse; i++) {
+            ByteBuffer buffer = buffers.get(i);
+            fta.reset(buffer);
+            for (int j = 0; j < fta.getTupleCount(); j++) {
+                tuple.reset(fta, j);
+                bta.update(tuple);
+            }
+        }
+
+        //clean up the buffer
+        currentInUse = 0;
+        ByteBuffer buffer = buffers.get(0);
+        appender.reset(buffer, true);
+    }
+
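+    //grow the frame list lazily; allocated frames are kept and reused across flushes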
+    private void allocate(int index) {
+        if (index >= buffers.size()) {
+            buffers.add(ctx.allocateFrame());
+        }
+    }
+}
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/UpdateBufferTupleAccessor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/UpdateBufferTupleAccessor.java
new file mode 100644
index 0000000..39f1361
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/UpdateBufferTupleAccessor.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.util;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.FrameHelper;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+
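+/**
+ * A minimal frame tuple accessor for update buffer frames. Unlike the generic
+ * accessor, it takes a fixed field count instead of a RecordDescriptor, since
+ * all buffered update tuples share the same schema.
+ */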
+public final class UpdateBufferTupleAccessor implements IFrameTupleAccessor {
+    private final int frameSize;
+    private final int fieldCount;
+    private ByteBuffer buffer;
+
+    public UpdateBufferTupleAccessor(int frameSize, int fieldCount) {
+        this.frameSize = frameSize;
+        this.fieldCount = fieldCount;
+    }
+
+    @Override
+    public void reset(ByteBuffer buffer) {
+        this.buffer = buffer;
+    }
+
+    @Override
+    public ByteBuffer getBuffer() {
+        return buffer;
+    }
+
+    @Override
+    public int getTupleCount() {
+        return buffer.getInt(FrameHelper.getTupleCountOffset(frameSize));
+    }
+
+    @Override
+    public int getTupleStartOffset(int tupleIndex) {
+        return tupleIndex == 0 ? 0 : buffer.getInt(FrameHelper.getTupleCountOffset(frameSize) - 4 * tupleIndex);
+    }
+
+    @Override
+    public int getTupleEndOffset(int tupleIndex) {
+        return buffer.getInt(FrameHelper.getTupleCountOffset(frameSize) - 4 * (tupleIndex + 1));
+    }
+
+    @Override
+    public int getFieldStartOffset(int tupleIndex, int fIdx) {
+        return fIdx == 0 ? 0 : buffer.getInt(getTupleStartOffset(tupleIndex) + (fIdx - 1) * 4);
+    }
+
+    @Override
+    public int getFieldEndOffset(int tupleIndex, int fIdx) {
+        return buffer.getInt(getTupleStartOffset(tupleIndex) + fIdx * 4);
+    }
+
+    @Override
+    public int getFieldLength(int tupleIndex, int fIdx) {
+        return getFieldEndOffset(tupleIndex, fIdx) - getFieldStartOffset(tupleIndex, fIdx);
+    }
+
+    @Override
+    public int getFieldSlotsLength() {
+        return getFieldCount() * 4;
+    }
+
+    @Override
+    public int getFieldCount() {
+        return fieldCount;
+    }
+}
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow/pom.xml b/pregelix/pregelix-dataflow/pom.xml
index aaa7186..d3f396e 100644
--- a/pregelix/pregelix-dataflow/pom.xml
+++ b/pregelix/pregelix-dataflow/pom.xml
@@ -1,14 +1,15 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 	<modelVersion>4.0.0</modelVersion>
 	<artifactId>pregelix-dataflow</artifactId>
 	<packaging>jar</packaging>
 	<name>pregelix-dataflow</name>
 
 	<parent>
-    		<groupId>edu.uci.ics.hyracks</groupId>
-    		<artifactId>pregelix</artifactId>
-    		<version>0.2.3-SNAPSHOT</version>
-  	</parent>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<artifactId>pregelix</artifactId>
+		<version>0.2.3-SNAPSHOT</version>
+	</parent>
 
 
 	<properties>
@@ -22,8 +23,9 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
+					<fork>true</fork>
 				</configuration>
 			</plugin>
 			<plugin>
@@ -42,6 +44,7 @@
 			</plugin>
 			<plugin>
 				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
 				<configuration>
 					<filesets>
 						<fileset>
@@ -102,13 +105,6 @@
 			<version>0.2.3-SNAPSHOT</version>
 		</dependency>
 		<dependency>
-			<groupId>org.apache.hadoop</groupId>
-			<artifactId>hadoop-core</artifactId>
-			<version>0.20.2</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
 			<artifactId>hyracks-storage-am-common</artifactId>
 			<version>0.2.3-SNAPSHOT</version>
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/ConnectorPolicyAssignmentPolicy.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/ConnectorPolicyAssignmentPolicy.java
index d29afca..ae47ed8 100644
--- a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/ConnectorPolicyAssignmentPolicy.java
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/ConnectorPolicyAssignmentPolicy.java
@@ -15,25 +15,44 @@
 
 package edu.uci.ics.pregelix.dataflow;
 
+import org.apache.commons.lang3.tuple.Pair;
+
 import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
 import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
 import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicyAssignmentPolicy;
 import edu.uci.ics.hyracks.api.dataflow.connectors.PipeliningConnectorPolicy;
+import edu.uci.ics.hyracks.api.dataflow.connectors.SendSideMaterializedBlockingConnectorPolicy;
 import edu.uci.ics.hyracks.api.dataflow.connectors.SendSideMaterializedPipeliningConnectorPolicy;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
 
 public class ConnectorPolicyAssignmentPolicy implements IConnectorPolicyAssignmentPolicy {
     private static final long serialVersionUID = 1L;
-    private IConnectorPolicy senderSideMaterializePolicy = new SendSideMaterializedPipeliningConnectorPolicy();
+    private IConnectorPolicy senderSideMatPipPolicy = new SendSideMaterializedPipeliningConnectorPolicy();
+    private IConnectorPolicy senderSideMatBlkPolicy = new SendSideMaterializedBlockingConnectorPolicy();
     private IConnectorPolicy pipeliningPolicy = new PipeliningConnectorPolicy();
+    private JobSpecification spec;
+
+    public ConnectorPolicyAssignmentPolicy(JobSpecification spec) {
+        this.spec = spec;
+    }
 
     @Override
     public IConnectorPolicy getConnectorPolicyAssignment(IConnectorDescriptor c, int nProducers, int nConsumers,
             int[] fanouts) {
         if (c instanceof MToNPartitioningMergingConnectorDescriptor) {
-            return senderSideMaterializePolicy;
+            return senderSideMatPipPolicy;
         } else {
-            return pipeliningPolicy;
+            Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> endPoints = spec
+                    .getConnectorOperatorMap().get(c.getConnectorId());
+            IOperatorDescriptor consumer = endPoints.getRight().getLeft();
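+            //a blocking, send-side materialized policy lets the index-update
+            //consumer start only after all of its producers have finished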
+            if (consumer instanceof TreeIndexInsertUpdateDeleteOperatorDescriptor) {
+                return senderSideMatBlkPolicy;
+            } else {
+                return pipeliningPolicy;
+            }
         }
     }
 }
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/HDFSFileWriteOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/HDFSFileWriteOperatorDescriptor.java
index c25e4c6..2402748 100644
--- a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/HDFSFileWriteOperatorDescriptor.java
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/HDFSFileWriteOperatorDescriptor.java
@@ -14,6 +14,8 @@
  */
 package edu.uci.ics.pregelix.dataflow;
 
+import java.io.File;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
@@ -23,9 +25,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
@@ -37,6 +37,7 @@
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameDeserializer;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.hyracks.hdfs.ContextFactory;
 import edu.uci.ics.pregelix.api.graph.Vertex;
 import edu.uci.ics.pregelix.api.io.VertexOutputFormat;
 import edu.uci.ics.pregelix.api.io.VertexWriter;
@@ -64,21 +65,24 @@
         return new AbstractUnaryInputSinkOperatorNodePushable() {
             private RecordDescriptor rd0;
             private FrameDeserializer frameDeserializer;
-            private Configuration conf = confFactory.createConfiguration();
+            private Configuration conf;
             private VertexWriter vertexWriter;
             private TaskAttemptContext context;
             private String TEMP_DIR = "_temporary";
+            private ClassLoader ctxCL;
+            private ContextFactory ctxFactory = new ContextFactory();
 
             @Override
             public void open() throws HyracksDataException {
                 rd0 = inputRdFactory == null ? recordDescProvider.getInputRecordDescriptor(getActivityId(), 0)
                         : inputRdFactory.createRecordDescriptor();
                 frameDeserializer = new FrameDeserializer(ctx.getFrameSize(), rd0);
+                ctxCL = Thread.currentThread().getContextClassLoader();
                 Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
+                conf = confFactory.createConfiguration();
 
                 VertexOutputFormat outputFormat = BspUtils.createVertexOutputFormat(conf);
-                TaskAttemptID tid = new TaskAttemptID("", 0, true, partition, 0);
-                context = new TaskAttemptContext(conf, tid);
+                context = ctxFactory.createContext(conf, partition);
                 try {
                     vertexWriter = outputFormat.createVertexWriter(context);
                 } catch (InterruptedException e) {
@@ -107,7 +111,7 @@
 
             @Override
             public void fail() throws HyracksDataException {
-
+                Thread.currentThread().setContextClassLoader(ctxCL);
             }
 
             @Override
@@ -124,36 +128,63 @@
 
             private void moveFilesToFinalPath() throws HyracksDataException {
                 try {
-                    JobContext job = new JobContext(conf, new JobID("0", 0));
+                    JobContext job = ctxFactory.createJobContext(conf);
                     Path outputPath = FileOutputFormat.getOutputPath(job);
                     FileSystem dfs = FileSystem.get(conf);
                     Path filePath = new Path(outputPath, "part-" + new Integer(partition).toString());
-                    FileStatus[] tempPaths = dfs.listStatus(outputPath, new PathFilter() {
-                        @Override
-                        public boolean accept(Path dir) {
-                            return dir.getName().endsWith(TEMP_DIR);
-                        }
-                    });
-                    Path tempDir = tempPaths[0].getPath();
-                    FileStatus[] results = dfs.listStatus(tempDir, new PathFilter() {
-                        @Override
-                        public boolean accept(Path dir) {
-                            return dir.getName().indexOf(context.getTaskAttemptID().toString()) >= 0;
-                        }
-                    });
-                    Path srcDir = results[0].getPath();
-                    if (!dfs.exists(srcDir))
-                        throw new HyracksDataException("file " + srcDir.toString() + " does not exist!");
-
-                    FileStatus[] srcFiles = dfs.listStatus(srcDir);
-                    Path srcFile = srcFiles[0].getPath();
-                    dfs.delete(filePath, true);
-                    dfs.rename(srcFile, filePath);
+                    FileStatus[] results = findPartitionPaths(outputPath, dfs);
+                    if (results.length >= 1) {
+                        /**
+                         * for Hadoop-0.20.2
+                         */
+                        renameFile(dfs, filePath, results);
+                    } else {
+                        /**
+                         * for Hadoop-0.23.1
+                         */
+                        int jobId = job.getJobID().getId();
+                        outputPath = new Path(outputPath.toString() + File.separator + TEMP_DIR + File.separator
+                                + jobId);
+                        results = findPartitionPaths(outputPath, dfs);
+                        renameFile(dfs, filePath, results);
+                    }
                 } catch (IOException e) {
                     throw new HyracksDataException(e);
+                } finally {
+                    Thread.currentThread().setContextClassLoader(ctxCL);
                 }
             }
 
+            private FileStatus[] findPartitionPaths(Path outputPath, FileSystem dfs) throws FileNotFoundException,
+                    IOException {
+                FileStatus[] tempPaths = dfs.listStatus(outputPath, new PathFilter() {
+                    @Override
+                    public boolean accept(Path dir) {
+                        return dir.getName().endsWith(TEMP_DIR);
+                    }
+                });
+                Path tempDir = tempPaths[0].getPath();
+                FileStatus[] results = dfs.listStatus(tempDir, new PathFilter() {
+                    @Override
+                    public boolean accept(Path dir) {
+                        return dir.getName().indexOf(context.getTaskAttemptID().toString()) >= 0;
+                    }
+                });
+                return results;
+            }
+
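+            /**
+             * Move the task's single output file to its final per-partition path.
+             */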
+            private void renameFile(FileSystem dfs, Path filePath, FileStatus[] results) throws IOException,
+                    HyracksDataException, FileNotFoundException {
+                Path srcDir = results[0].getPath();
+                if (!dfs.exists(srcDir))
+                    throw new HyracksDataException("file " + srcDir.toString() + " does not exist!");
+
+                FileStatus[] srcFiles = dfs.listStatus(srcDir);
+                Path srcFile = srcFiles[0].getPath();
+                dfs.delete(filePath, true);
+                dfs.rename(srcFile, filePath);
+            }
+
         };
     }
 }
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexFileScanOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexFileScanOperatorDescriptor.java
index f1b98f5..0da7baf 100644
--- a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexFileScanOperatorDescriptor.java
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexFileScanOperatorDescriptor.java
@@ -17,17 +17,15 @@
 import java.io.DataOutput;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
-import java.util.logging.Logger;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Mapper.Context;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
@@ -42,6 +40,8 @@
 import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import edu.uci.ics.hyracks.hdfs.ContextFactory;
+import edu.uci.ics.hyracks.hdfs2.dataflow.FileSplitsFactory;
 import edu.uci.ics.pregelix.api.graph.Vertex;
 import edu.uci.ics.pregelix.api.io.VertexInputFormat;
 import edu.uci.ics.pregelix.api.io.VertexReader;
@@ -50,38 +50,67 @@
 
 @SuppressWarnings("rawtypes")
 public class VertexFileScanOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-    private static final Logger LOGGER = Logger.getLogger(VertexFileScanOperatorDescriptor.class.getName());
     private static final long serialVersionUID = 1L;
-    private final List<InputSplit> splits;
+    private final FileSplitsFactory splitsFactory;
     private final IConfigurationFactory confFactory;
     private final int fieldSize = 2;
+    private final String[] scheduledLocations;
+    private final boolean[] executed;
 
     /**
      * @param spec
      */
     public VertexFileScanOperatorDescriptor(JobSpecification spec, RecordDescriptor rd, List<InputSplit> splits,
-            IConfigurationFactory confFactory) throws HyracksException {
+            String[] scheduledLocations, IConfigurationFactory confFactory) throws HyracksException {
         super(spec, 0, 1);
-        this.splits = splits;
+        List<FileSplit> fileSplits = new ArrayList<FileSplit>();
+        for (int i = 0; i < splits.size(); i++) {
+            fileSplits.add((FileSplit) splits.get(i));
+        }
+        this.splitsFactory = new FileSplitsFactory(fileSplits);
         this.confFactory = confFactory;
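+        //one flag per scheduled location; each split is claimed by exactly one
+        //local task via the synchronized check in initialize()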
+        this.scheduledLocations = scheduledLocations;
+        this.executed = new boolean[scheduledLocations.length];
+        Arrays.fill(executed, false);
         this.recordDescriptors[0] = rd;
     }
 
     public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
             IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
             throws HyracksDataException {
+        final List<FileSplit> splits = splitsFactory.getSplits();
+
         return new AbstractUnaryOutputSourceOperatorNodePushable() {
-            private Configuration conf = confFactory.createConfiguration();
+            private ClassLoader ctxCL;
+            private ContextFactory ctxFactory = new ContextFactory();
 
             @Override
             public void initialize() throws HyracksDataException {
+                ctxCL = Thread.currentThread().getContextClassLoader();
                 try {
                     Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
+                    Configuration conf = confFactory.createConfiguration();
                     writer.open();
-                    loadVertices(ctx, partition);
+                    for (int i = 0; i < scheduledLocations.length; i++) {
+                        if (scheduledLocations[i].equals(ctx.getJobletContext().getApplicationContext().getNodeId())) {
+                            /**
+                             * pick one from the FileSplit queue
+                             */
+                            synchronized (executed) {
+                                if (!executed[i]) {
+                                    executed[i] = true;
+                                } else {
+                                    continue;
+                                }
+                            }
+                            loadVertices(ctx, conf, i);
+                        }
+                    }
                     writer.close();
                 } catch (Exception e) {
                     throw new HyracksDataException(e);
+                } finally {
+                    Thread.currentThread().setContextClassLoader(ctxCL);
                 }
             }
 
@@ -96,24 +125,19 @@
              * @throws InterruptedException
              */
             @SuppressWarnings("unchecked")
-            private void loadVertices(final IHyracksTaskContext ctx, int partitionId) throws IOException,
-                    ClassNotFoundException, InterruptedException, InstantiationException, IllegalAccessException {
+            private void loadVertices(final IHyracksTaskContext ctx, Configuration conf, int splitId)
+                    throws IOException, ClassNotFoundException, InterruptedException, InstantiationException,
+                    IllegalAccessException {
                 ByteBuffer frame = ctx.allocateFrame();
                 FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
                 appender.reset(frame, true);
 
                 VertexInputFormat vertexInputFormat = BspUtils.createVertexInputFormat(conf);
-                TaskAttemptContext context = new TaskAttemptContext(conf, new TaskAttemptID());
-                InputSplit split = splits.get(partition);
+                InputSplit split = splits.get(splitId);
+                TaskAttemptContext mapperContext = ctxFactory.createContext(conf, splitId);
 
-                if (split instanceof FileSplit) {
-                    FileSplit fileSplit = (FileSplit) split;
-                    LOGGER.info("read file split: " + fileSplit.getPath() + " location:" + fileSplit.getLocations()[0]
-                            + " start:" + fileSplit.getStart() + " length:" + split.getLength() + " partition:"
-                            + partition);
-                }
-                VertexReader vertexReader = vertexInputFormat.createVertexReader(split, context);
-                vertexReader.initialize(split, context);
+                VertexReader vertexReader = vertexInputFormat.createVertexReader(split, mapperContext);
+                vertexReader.initialize(split, mapperContext);
                 Vertex readerVertex = (Vertex) BspUtils.createVertex(conf);
                 ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldSize);
                 DataOutput dos = tb.getDataOutput();
@@ -121,8 +145,6 @@
                 /**
                  * set context
                  */
-                Context mapperContext = new Mapper().new Context(conf, new TaskAttemptID(), null, null, null, null,
-                        splits.get(partition));
                 Vertex.setContext(mapperContext);
 
                 /**
@@ -166,5 +188,4 @@
             }
         };
     }
-
 }
\ No newline at end of file
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexWriteOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexWriteOperatorDescriptor.java
new file mode 100644
index 0000000..d7cbb3a
--- /dev/null
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexWriteOperatorDescriptor.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow;
+
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.util.StringSerializationUtils;
+import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
+import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
+
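+/**
+ * An operator that writes the last field of each incoming tuple (the vertex)
+ * to a local file split, one vertex per line.
+ */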
+public class VertexWriteOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
+    private final FileSplit[] splits;
+    private final IRuntimeHookFactory preHookFactory;
+    private final IRuntimeHookFactory postHookFactory;
+    private final IRecordDescriptorFactory inputRdFactory;
+
+    public VertexWriteOperatorDescriptor(JobSpecification spec, IRecordDescriptorFactory inputRdFactory,
+            IFileSplitProvider fileSplitProvider, IRuntimeHookFactory preHookFactory,
+            IRuntimeHookFactory postHookFactory) {
+        super(spec, 1, 0);
+        this.splits = fileSplitProvider.getFileSplits();
+        this.preHookFactory = preHookFactory;
+        this.postHookFactory = postHookFactory;
+        this.inputRdFactory = inputRdFactory;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            final IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
+            throws HyracksDataException {
+        IOperatorNodePushable op = new AbstractUnaryInputSinkOperatorNodePushable() {
+            private RecordDescriptor rd0;
+            private FrameDeserializer frameDeserializer;
+            private PrintWriter outputWriter;
+
+            @Override
+            public void open() throws HyracksDataException {
+                rd0 = inputRdFactory == null ? recordDescProvider.getInputRecordDescriptor(getActivityId(), 0)
+                        : inputRdFactory.createRecordDescriptor();
+                frameDeserializer = new FrameDeserializer(ctx.getFrameSize(), rd0);
+                try {
+                    outputWriter = new PrintWriter(new OutputStreamWriter(new FileOutputStream(splits[partition]
+                            .getLocalFile().getFile())));
+                    if (preHookFactory != null)
+                        preHookFactory.createRuntimeHook().configure(ctx);
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+
+            @Override
+            public void nextFrame(ByteBuffer frame) throws HyracksDataException {
+                frameDeserializer.reset(frame);
+                while (!frameDeserializer.done()) {
+                    Object[] tuple = frameDeserializer.deserializeRecord();
+                    // output the vertex
+                    outputWriter.print(StringSerializationUtils.toString(tuple[tuple.length - 1]));
+                    outputWriter.println();
+                }
+            }
+
+            @Override
+            public void fail() throws HyracksDataException {
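+                // nothing to roll back here; the framework still invokes close(),
+                // which releases the output writer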
+
+            }
+
+            @Override
+            public void close() throws HyracksDataException {
+                if (postHookFactory != null)
+                    postHookFactory.createRuntimeHook().configure(ctx);
+                // guard against open() having failed before the writer was created
+                if (outputWriter != null) {
+                    outputWriter.close();
+                }
+            }
+
+        };
+        return op;
+    }
+}
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/context/RuntimeContext.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/context/RuntimeContext.java
index 43b6d17..567e220 100644
--- a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/context/RuntimeContext.java
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/context/RuntimeContext.java
@@ -119,23 +119,23 @@
             Vertex.setNumEdges(numEdges);
             giraphJobIdToSuperStep.put(giraphJobId, superStep);
             giraphJobIdToMove.put(giraphJobId, false);
-            LOGGER.info("start iteration " + Vertex.getCurrentSuperstep());
+            LOGGER.info("start iteration " + Vertex.getSuperstep());
         }
         System.gc();
     }
 
     public synchronized void endSuperStep(String giraphJobId) {
         giraphJobIdToMove.put(giraphJobId, true);
-        LOGGER.info("end iteration " + Vertex.getCurrentSuperstep());
+        LOGGER.info("end iteration " + Vertex.getSuperstep());
     }
 
     @Override
     public FileReference createManagedWorkspaceFile(String prefix) throws HyracksDataException {
         final FileReference fRef = ioManager.createWorkspaceFile(prefix);
-        List<FileReference> files = iterationToFiles.get(Vertex.getCurrentSuperstep());
+        List<FileReference> files = iterationToFiles.get(Vertex.getSuperstep());
         if (files == null) {
             files = new ArrayList<FileReference>();
-            iterationToFiles.put(Vertex.getCurrentSuperstep(), files);
+            iterationToFiles.put(Vertex.getSuperstep(), files);
         }
         files.add(fRef);
         return fRef;
diff --git a/pregelix/pregelix-dist/pom.xml b/pregelix/pregelix-dist/pom.xml
index 4cb0387..847e843 100644
--- a/pregelix/pregelix-dist/pom.xml
+++ b/pregelix/pregelix-dist/pom.xml
@@ -19,8 +19,8 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
 				</configuration>
 			</plugin>
 			<plugin>
@@ -70,7 +70,7 @@
 			<groupId>edu.uci.ics.hyracks</groupId>
 			<artifactId>pregelix-core</artifactId>
 			<version>0.2.3-SNAPSHOT</version>
-			<scope>comile</scope>
+			<scope>compile</scope>
 		</dependency>
 		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
diff --git a/pregelix/pregelix-example/data/clique/clique.txt b/pregelix/pregelix-example/data/clique/clique.txt
new file mode 100755
index 0000000..08280e3
--- /dev/null
+++ b/pregelix/pregelix-example/data/clique/clique.txt
@@ -0,0 +1,7 @@
+1 2 3 4
+2 1 3 4 5
+3 1 2 4 5
+4 1 2 3
+5 6 7
+6 5 7
+7 5 6
diff --git a/pregelix/pregelix-example/pom.xml b/pregelix/pregelix-example/pom.xml
index 6b9bc89..e643331 100644
--- a/pregelix/pregelix-example/pom.xml
+++ b/pregelix/pregelix-example/pom.xml
@@ -17,8 +17,9 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
+					<fork>true</fork>
 				</configuration>
 			</plugin>
 			<plugin>
@@ -41,6 +42,7 @@
 			<plugin>
 				<groupId>org.codehaus.mojo</groupId>
 				<artifactId>appassembler-maven-plugin</artifactId>
+				<version>1.3</version>
 				<executions>
 					<execution>
 						<configuration>
@@ -75,7 +77,9 @@
 				</configuration>
 			</plugin>
 			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
 				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
 				<configuration>
 					<filesets>
 						<fileset>
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ConnectedComponentsVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ConnectedComponentsVertex.java
index 30e88ea..74ae455 100644
--- a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ConnectedComponentsVertex.java
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ConnectedComponentsVertex.java
@@ -126,6 +126,11 @@
         }
     }
 
+    @Override
+    public String toString() {
+        return getVertexId() + " " + getVertexValue();
+    }
+
     public static void main(String[] args) throws Exception {
         PregelixJob job = new PregelixJob(ConnectedComponentsVertex.class.getSimpleName());
         job.setVertexClass(ConnectedComponentsVertex.class);
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/GraphMutationVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/GraphMutationVertex.java
new file mode 100644
index 0000000..e54373f
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/GraphMutationVertex.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexWriter;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat.TextVertexWriter;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/**
+ * Demonstrates the basic graph vertex insert/delete implementation.
+ */
+public class GraphMutationVertex extends Vertex<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> {
+
+    private VLongWritable vid = new VLongWritable();
+    private GraphMutationVertex newVertex = null;
+
+    @Override
+    public void compute(Iterator<DoubleWritable> msgIterator) {
+        if (Vertex.getSuperstep() == 1) {
+            if (newVertex == null) {
+                newVertex = new GraphMutationVertex();
+            }
+            if (getVertexId().get() % 2 == 0 || getVertexId().get() % 3 == 0) {
+                deleteVertex(getVertexId());
+            } else {
+                vid.set(100 * getVertexId().get());
+                newVertex.setVertexId(vid);
+                newVertex.setVertexValue(getVertexValue());
+                addVertex(vid, newVertex);
+            }
+            voteToHalt();
+        } else {
+            if (getVertexId().get() % 190 == 0) {
+                deleteVertex(getVertexId());
+            }
+            voteToHalt();
+        }
+    }
+
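+    // Summary of the mutation schedule above (inferred from the code): superstep 1
+    // deletes every vertex whose id is divisible by 2 or 3 and, for each survivor,
+    // inserts a new vertex with id 100 * id carrying the survivor's value; later
+    // supersteps delete vertices whose id is divisible by 190, which can only be
+    // inserted vertices.
+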
+    /**
+     * Simple VertexWriter that supports {@link GraphMutationVertex}
+     */
+    public static class SimpleGraphMutationVertexWriter extends
+            TextVertexWriter<VLongWritable, DoubleWritable, FloatWritable> {
+        public SimpleGraphMutationVertexWriter(RecordWriter<Text, Text> lineRecordWriter) {
+            super(lineRecordWriter);
+        }
+
+        @Override
+        public void writeVertex(Vertex<VLongWritable, DoubleWritable, FloatWritable, ?> vertex) throws IOException,
+                InterruptedException {
+            getRecordWriter().write(new Text(vertex.getVertexId().toString()),
+                    new Text(vertex.getVertexValue().toString()));
+        }
+    }
+
+    @Override
+    public String toString() {
+        return getVertexId() + " " + getVertexValue();
+    }
+
+    /**
+     * Simple VertexOutputFormat that supports {@link GraphMutationVertex}
+     */
+    public static class SimpleGraphMutationVertexOutputFormat extends
+            TextVertexOutputFormat<VLongWritable, DoubleWritable, FloatWritable> {
+
+        @Override
+        public VertexWriter<VLongWritable, DoubleWritable, FloatWritable> createVertexWriter(TaskAttemptContext context)
+                throws IOException, InterruptedException {
+            RecordWriter<Text, Text> recordWriter = textOutputFormat.getRecordWriter(context);
+            return new SimpleGraphMutationVertexWriter(recordWriter);
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        PregelixJob job = new PregelixJob(GraphMutationVertex.class.getSimpleName());
+        job.setVertexClass(GraphMutationVertex.class);
+        job.setVertexInputFormatClass(TextPageRankInputFormat.class);
+        job.setVertexOutputFormatClass(SimpleGraphMutationVertexOutputFormat.class);
+        Client.run(args, job);
+    }
+
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/PageRankVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/PageRankVertex.java
index 290f90e..b6d4da7 100644
--- a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/PageRankVertex.java
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/PageRankVertex.java
@@ -54,6 +54,7 @@
     public static final String ITERATIONS = "HyracksPageRankVertex.iteration";
     private DoubleWritable outputValue = new DoubleWritable();
     private DoubleWritable tmpVertexValue = new DoubleWritable();
+    private int maxIteration = -1;
 
     /**
      * Test whether combiner is called by summing up the messages.
@@ -97,7 +98,9 @@
 
     @Override
     public void compute(Iterator<DoubleWritable> msgIterator) {
-        int maxIteration = this.getContext().getConfiguration().getInt(ITERATIONS, 10);
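+        // read the configured iteration bound once and cache it for subsequent supersteps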
+        if (maxIteration < 0) {
+            maxIteration = getContext().getConfiguration().getInt(ITERATIONS, 10);
+        }
         if (getSuperstep() == 1) {
             tmpVertexValue.set(1.0 / getNumVertices());
             setVertexValue(tmpVertexValue);
@@ -123,13 +126,13 @@
     /**
      * Simple VertexReader that supports {@link SimplePageRankVertex}
      */
-    public static class SimplePageRankVertexReader extends
+    public static class SimulatedPageRankVertexReader extends
             GeneratedVertexReader<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> {
         /** Class logger */
-        private static final Logger LOG = Logger.getLogger(SimplePageRankVertexReader.class.getName());
+        private static final Logger LOG = Logger.getLogger(SimulatedPageRankVertexReader.class.getName());
         private Map<VLongWritable, FloatWritable> edges = Maps.newHashMap();
 
-        public SimplePageRankVertexReader() {
+        public SimulatedPageRankVertexReader() {
             super();
         }
 
@@ -162,12 +165,12 @@
     /**
      * Simple VertexInputFormat that supports {@link SimplePageRankVertex}
      */
-    public static class SimplePageRankVertexInputFormat extends
+    public static class SimulatedPageRankVertexInputFormat extends
             GeneratedVertexInputFormat<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> {
         @Override
         public VertexReader<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> createVertexReader(
                 InputSplit split, TaskAttemptContext context) throws IOException {
-            return new SimplePageRankVertexReader();
+            return new SimulatedPageRankVertexReader();
         }
     }
 
@@ -188,6 +191,11 @@
         }
     }
 
+    @Override
+    public String toString() {
+        return getVertexId() + " " + getVertexValue();
+    }
+
     /**
      * Simple VertexOutputFormat that supports {@link SimplePageRankVertex}
      */
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ReachabilityVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ReachabilityVertex.java
index 2f0ca45..0895386 100644
--- a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ReachabilityVertex.java
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ReachabilityVertex.java
@@ -85,6 +85,7 @@
     }
 
     private ByteWritable tmpVertexValue = new ByteWritable();
+    private long sourceId = -1;
 
     /** The source vertex id */
     public static final String SOURCE_ID = "ReachibilityVertex.sourceId";
@@ -101,7 +102,7 @@
      * @return true if v is the source vertex id
      */
     private boolean isSource(VLongWritable v) {
-        return (v.get() == getContext().getConfiguration().getLong(SOURCE_ID, SOURCE_ID_DEFAULT));
+        return (v.get() == sourceId);
     }
 
     /**
@@ -115,6 +116,9 @@
 
     @Override
     public void compute(Iterator<ByteWritable> msgIterator) {
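+        // cache the source vertex id from the job configuration on first use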
+        if (sourceId < 0) {
+            sourceId = getContext().getConfiguration().getLong(SOURCE_ID, SOURCE_ID_DEFAULT);
+        }
         if (getSuperstep() == 1) {
             boolean isSource = isSource(getVertexId());
             if (isSource) {
@@ -161,6 +165,11 @@
         voteToHalt();
     }
 
+    @Override
+    public String toString() {
+        return getVertexId() + " " + getVertexValue();
+    }
+
     private void signalTerminate() {
         Configuration conf = getContext().getConfiguration();
         try {
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ShortestPathsVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ShortestPathsVertex.java
index a018f08..199870e 100644
--- a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ShortestPathsVertex.java
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ShortestPathsVertex.java
@@ -126,6 +126,11 @@
         }
         voteToHalt();
     }
+    
+    @Override
+    public String toString() {
+        return getVertexId() + " " + getVertexValue();
+    }
 
     public static void main(String[] args) throws Exception {
         PregelixJob job = new PregelixJob(ShortestPathsVertex.class.getSimpleName());
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/VertexAggregator.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/VertexAggregator.java
deleted file mode 100644
index 68b7cca..0000000
--- a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/VertexAggregator.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.pregelix.example;
-
-import java.io.IOException;
-import java.util.Iterator;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.FileOutputFormat;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.MapReduceBase;
-import org.apache.hadoop.mapred.Mapper;
-import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.Reducer;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.TextInputFormat;
-
-@SuppressWarnings("deprecation")
-public class VertexAggregator {
-
-    public static class MapRecordOnly extends MapReduceBase implements
-            Mapper<LongWritable, Text, NullWritable, LongWritable> {
-        private final NullWritable nullValue = NullWritable.get();
-        private final LongWritable count = new LongWritable(1);
-
-        public void map(LongWritable id, Text inputValue, OutputCollector<NullWritable, LongWritable> output,
-                Reporter reporter) throws IOException {
-            output.collect(nullValue, count);
-        }
-    }
-
-    public static class CombineRecordOnly extends MapReduceBase implements
-            Reducer<NullWritable, LongWritable, NullWritable, LongWritable> {
-        private final NullWritable nullValue = NullWritable.get();
-
-        public void reduce(NullWritable inputKey, Iterator<LongWritable> inputValue,
-                OutputCollector<NullWritable, LongWritable> output, Reporter reporter) throws IOException {
-            long count = 0;
-            while (inputValue.hasNext())
-                count += inputValue.next().get();
-            output.collect(nullValue, new LongWritable(count));
-        }
-    }
-
-    public static class ReduceRecordOnly extends MapReduceBase implements
-            Reducer<NullWritable, LongWritable, NullWritable, Text> {
-        private final NullWritable nullValue = NullWritable.get();
-
-        public void reduce(NullWritable inputKey, Iterator<LongWritable> inputValue,
-                OutputCollector<NullWritable, Text> output, Reporter reporter) throws IOException {
-            long count = 0;
-            while (inputValue.hasNext())
-                count += inputValue.next().get();
-            output.collect(nullValue, new Text(Long.toString(count)));
-        }
-    }
-
-    public static void main(String[] args) throws IOException {
-        JobConf job = new JobConf(VertexAggregator.class);
-
-        job.setJobName(VertexAggregator.class.getSimpleName());
-        job.setMapperClass(MapRecordOnly.class);
-        job.setCombinerClass(CombineRecordOnly.class);
-        job.setReducerClass(ReduceRecordOnly.class);
-        job.setMapOutputKeyClass(NullWritable.class);
-        job.setMapOutputValueClass(LongWritable.class);
-
-        job.setInputFormat(TextInputFormat.class);
-        FileInputFormat.setInputPaths(job, args[0]);
-        FileOutputFormat.setOutputPath(job, new Path(args[1]));
-        job.setNumReduceTasks(Integer.parseInt(args[2]));
-        JobClient.runJob(job);
-    }
-}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/VertexSorter.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/VertexSorter.java
deleted file mode 100644
index 1dd6922..0000000
--- a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/VertexSorter.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.pregelix.example;
-
-import java.io.IOException;
-import java.util.Iterator;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.FileOutputFormat;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.MapReduceBase;
-import org.apache.hadoop.mapred.Mapper;
-import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.Reducer;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.TextInputFormat;
-
-@SuppressWarnings("deprecation")
-public class VertexSorter {
-    public static class MapRecordOnly extends MapReduceBase implements Mapper<LongWritable, Text, LongWritable, Text> {
-        private static String separator = " ";
-
-        public void map(LongWritable id, Text inputValue, OutputCollector<LongWritable, Text> output, Reporter reporter)
-                throws IOException {
-            String[] fields = inputValue.toString().split(separator);
-            LongWritable vertexId = new LongWritable(Long.parseLong(fields[0]));
-            output.collect(vertexId, inputValue);
-        }
-    }
-
-    public static class ReduceRecordOnly extends MapReduceBase implements
-            Reducer<LongWritable, Text, NullWritable, Text> {
-
-        NullWritable key = NullWritable.get();
-
-        public void reduce(LongWritable inputKey, Iterator<Text> inputValue,
-                OutputCollector<NullWritable, Text> output, Reporter reporter) throws IOException {
-            while (inputValue.hasNext())
-                output.collect(key, inputValue.next());
-        }
-    }
-
-    public static void main(String[] args) throws IOException {
-        JobConf job = new JobConf(VertexSorter.class);
-
-        job.setJobName(VertexSorter.class.getSimpleName());
-        job.setMapperClass(MapRecordOnly.class);
-        job.setReducerClass(ReduceRecordOnly.class);
-        job.setMapOutputKeyClass(LongWritable.class);
-        job.setMapOutputValueClass(Text.class);
-
-        job.setInputFormat(TextInputFormat.class);
-        FileInputFormat.setInputPaths(job, args[0]);
-        FileOutputFormat.setOutputPath(job, new Path(args[1]));
-        job.setNumReduceTasks(Integer.parseInt(args[2]));
-        JobClient.runJob(job);
-    }
-}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/AdjacencyListWritable.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/AdjacencyListWritable.java
new file mode 100644
index 0000000..83e0a6b
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/AdjacencyListWritable.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example.maximalclique;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/**
+ * The adjacency list contains <src, list-of-neighbors>
+ */
+public class AdjacencyListWritable implements Writable {
+
+    private VLongWritable sourceVertex = new VLongWritable();
+    private Set<VLongWritable> destinationVertexes = new TreeSet<VLongWritable>();
+
+    public AdjacencyListWritable() {
+    }
+
+    public void reset() {
+        this.destinationVertexes.clear();
+    }
+
+    public void setSource(VLongWritable source) {
+        this.sourceVertex = source;
+    }
+
+    public void addNeighbor(VLongWritable neighbor) {
+        destinationVertexes.add(neighbor);
+    }
+
+    @Override
+    public void readFields(DataInput input) throws IOException {
+        sourceVertex = new VLongWritable();
+        destinationVertexes.clear();
+        sourceVertex.readFields(input);
+        int numberOfNeighbors = input.readInt();
+        for (int i = 0; i < numberOfNeighbors; i++) {
+            VLongWritable neighbor = new VLongWritable();
+            neighbor.readFields(input);
+            destinationVertexes.add(neighbor);
+        }
+    }
+
+    @Override
+    public void write(DataOutput output) throws IOException {
+        sourceVertex.write(output);
+        output.writeInt(destinationVertexes.size());
+        for (VLongWritable dest : destinationVertexes) {
+            dest.write(output);
+        }
+    }
+
+    public int numberOfNeighbors() {
+        return destinationVertexes.size();
+    }
+
+    public void removeNeighbor(VLongWritable v) {
+        destinationVertexes.remove(v);
+    }
+
+    public VLongWritable getSource() {
+        return sourceVertex;
+    }
+
+    public Iterator<VLongWritable> getNeighbors() {
+        return destinationVertexes.iterator();
+    }
+
+    public void cleanNonMatch(Collection<VLongWritable> matches) {
+        destinationVertexes.retainAll(matches);
+    }
+
+    public boolean isNeighbor(VLongWritable v) {
+        return destinationVertexes.contains(v);
+    }
+
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/CliquesWritable.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/CliquesWritable.java
new file mode 100644
index 0000000..0e22ea1
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/CliquesWritable.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example.maximalclique;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/**
+ * The representation of cliques stored in a vertex.
+ */
+public class CliquesWritable implements Writable {
+
+    private List<VLongWritable> cliques = new ArrayList<VLongWritable>();
+    private int sizeOfClique = 0;
+
+    public CliquesWritable(List<VLongWritable> cliques, int sizeOfClique) {
+        this.cliques = cliques;
+        this.sizeOfClique = sizeOfClique;
+    }
+
+    public CliquesWritable() {
+
+    }
+
+    /**
+     * Set the size of cliques.
+     * 
+     * @param sizeOfClique
+     *            the size of each maximal clique
+     */
+    public void setCliqueSize(int sizeOfClique) {
+        this.sizeOfClique = sizeOfClique;
+    }
+
+    /**
+     * Add the clique vertexes
+     * 
+     * @param cliques
+     *            the cliques to add -- may contain multiple cliques
+     */
+    public void addCliques(CliquesWritable cliques) {
+        this.cliques.addAll(cliques.cliques);
+    }
+
+    /**
+     * Add the clique vertexes
+     * 
+     * @param vertexes
+     *            the list of vertexes -- can contain multiple cliques
+     */
+    public void addCliques(List<VLongWritable> vertexes) {
+        this.cliques.addAll(vertexes);
+    }
+
+    /**
+     * @return the size of the clique
+     */
+    public int getSizeOfClique() {
+        return sizeOfClique;
+    }
+
+    /**
+     * Reset the cliques and the clique size.
+     */
+    public void reset() {
+        this.cliques.clear();
+        this.sizeOfClique = 0;
+    }
+
+    @Override
+    public void readFields(DataInput input) throws IOException {
+        cliques.clear();
+        int numCliques = input.readInt();
+        if (numCliques < 0) {
+            sizeOfClique = 0;
+            return;
+        }
+        sizeOfClique = input.readInt();
+        for (int i = 0; i < numCliques; i++) {
+            for (int j = 0; j < sizeOfClique; j++) {
+                VLongWritable vid = new VLongWritable();
+                vid.readFields(input);
+                cliques.add(vid);
+            }
+        }
+    }
+
+    @Override
+    public void write(DataOutput output) throws IOException {
+        if (sizeOfClique <= 0) {
+            output.writeInt(-1);
+            return;
+        }
+        output.writeInt(cliques.size() / sizeOfClique);
+        output.writeInt(sizeOfClique);
+
+        for (int i = 0; i < cliques.size(); i++) {
+            cliques.get(i).write(output);
+        }
+    }
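+
+    // Serialized layout (matching write()/readFields() above): an int clique
+    // count, an int clique size, then count * size vertex ids; a single -1
+    // clique count marks the empty state.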
+
+    @Override
+    public String toString() {
+        if (sizeOfClique == 0)
+            return "";
+        StringBuffer sb = new StringBuffer();
+        int numCliques = cliques.size() / sizeOfClique;
+        for (int i = 0; i < numCliques; i++) {
+            // offset of the i-th clique in the flat vertex list
+            int start = i * sizeOfClique;
+            for (int j = 0; j < sizeOfClique - 1; j++) {
+                sb.append(cliques.get(start + j));
+                sb.append(",");
+            }
+            sb.append(cliques.get(start + sizeOfClique - 1));
+            sb.append(";");
+        }
+        return sb.toString();
+    }
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueAggregator.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueAggregator.java
new file mode 100644
index 0000000..061e9e0
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueAggregator.java
@@ -0,0 +1,65 @@
+package edu.uci.ics.pregelix.example.maximalclique;
+
+import org.apache.hadoop.io.NullWritable;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.pregelix.api.graph.GlobalAggregator;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/**
+ * The global aggregator collects the maximal cliques found across all vertexes.
+ */
+public class MaximalCliqueAggregator
+        extends
+        GlobalAggregator<VLongWritable, CliquesWritable, NullWritable, AdjacencyListWritable, CliquesWritable, CliquesWritable> {
+
+    private CliquesWritable state = new CliquesWritable();
+
+    @Override
+    public void init() {
+        state.reset();
+    }
+
+    @Override
+    public void step(Vertex<VLongWritable, CliquesWritable, NullWritable, AdjacencyListWritable> v)
+            throws HyracksDataException {
+        CliquesWritable cliques = v.getVertexValue();
+        updateAggregateState(cliques);
+    }
+
+    /**
+     * Update the current aggregate state
+     * 
+     * @param cliques
+     *            the incoming cliques
+     */
+    private void updateAggregateState(CliquesWritable cliques) {
+        if (cliques.getSizeOfClique() > state.getSizeOfClique()) {
+            //reset the vertex state
+            state.reset();
+            state.setCliqueSize(cliques.getSizeOfClique());
+            state.addCliques(cliques);
+        } else if (cliques.getSizeOfClique() == state.getSizeOfClique()) {
+            //add the new cliques
+            state.addCliques(cliques);
+        }
+    }
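+
+    // Ties keep every clique of the current maximum size; strictly smaller
+    // cliques are dropped, so the final state holds all globally largest cliques.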
+
+    @Override
+    public void step(CliquesWritable partialResult) {
+        updateAggregateState(partialResult);
+    }
+
+    @Override
+    public CliquesWritable finishPartial() {
+        return state;
+    }
+
+    @Override
+    public CliquesWritable finishFinal() {
+        return state;
+    }
+
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueVertex.java
new file mode 100644
index 0000000..266feb7
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueVertex.java
@@ -0,0 +1,347 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example.maximalclique;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.graph.Edge;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexWriter;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat.TextVertexWriter;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
+import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/**
+ * The maximal clique example -- find maximal cliques in an undirected graph.
+ * Each resulting clique lists its vertexes in ascending vertex-id order; the
+ * algorithm exploits that ordering for effective pruning.
+ */
+public class MaximalCliqueVertex extends Vertex<VLongWritable, CliquesWritable, NullWritable, AdjacencyListWritable> {
+
+    private Map<VLongWritable, AdjacencyListWritable> map = new TreeMap<VLongWritable, AdjacencyListWritable>();
+    private List<VLongWritable> vertexList = new ArrayList<VLongWritable>();
+    private Map<VLongWritable, Integer> invertedMap = new TreeMap<VLongWritable, Integer>();
+    private int largestCliqueSizeSoFar = 0;
+    private List<BitSet> currentMaximalCliques = new ArrayList<BitSet>();
+    private CliquesWritable tmpValue = new CliquesWritable();
+    private List<VLongWritable> cliques = new ArrayList<VLongWritable>();
+
+    /**
+     * Update the current maximal cliques
+     * 
+     * @param values
+     *            the received adjacency lists
+     */
+    private void updateCurrentMaximalCliques(Iterator<AdjacencyListWritable> values) {
+        map.clear();
+        vertexList.clear();
+        invertedMap.clear();
+        currentMaximalCliques.clear();
+        cliques.clear();
+        tmpValue.reset();
+
+        // build the initial sub graph
+        while (values.hasNext()) {
+            AdjacencyListWritable adj = values.next();
+            map.put(adj.getSource(), adj);
+        }
+        VLongWritable srcId = getVertexId();
+        map.put(srcId, new AdjacencyListWritable());
+
+        // build the vertex list (vertex id in ascending order) and the inverted list of vertexes
+        int i = 0;
+        for (VLongWritable v : map.keySet()) {
+            vertexList.add(v);
+            invertedMap.put(v, i++);
+        }
+
+        //clean up adjacency list --- remove vertexes who are not neighbors of key
+        for (AdjacencyListWritable adj : map.values()) {
+            adj.cleanNonMatch(vertexList);
+        }
+
+        // get the h-index of the subgraph --- which is the maximum depth to explore
+        int[] neighborCounts = new int[map.size()];
+        i = 0;
+        for (AdjacencyListWritable adj : map.values()) {
+            neighborCounts[i++] = adj.numberOfNeighbors();
+        }
+        Arrays.sort(neighborCounts);
+        int h = 0;
+        for (i = neighborCounts.length - 1; i >= 0; i--) {
+            if (h >= neighborCounts[i]) {
+                break;
+            }
+            h++;
+        }
+        if (h < largestCliqueSizeSoFar) {
+            return;
+        }
+
+        //start depth-first search
+        BitSet cliqueSoFar = new BitSet(h);
+        for (VLongWritable v : vertexList) {
+            cliqueSoFar.set(invertedMap.get(v));
+            searchClique(h, cliqueSoFar, 1, v);
+            cliqueSoFar.clear();
+        }
+
+        //output local maximal cliques
+        for (BitSet clique : currentMaximalCliques) {
+            int keyIndex = invertedMap.get(srcId);
+            clique.set(keyIndex);
+            generateClique(clique);
+            tmpValue.addCliques(cliques);
+            tmpValue.setCliqueSize(clique.cardinality());
+        }
+
+        //update the vertex state
+        setVertexValue(tmpValue);
+    }
+
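+    // Worked example (hand-checked against the bundled data/clique/clique.txt):
+    // vertex 1 receives the larger-id neighbor lists of vertexes 2, 3 and 4; the
+    // induced subgraph on {1,2,3,4} is complete, so the 4-clique 1,2,3,4 is
+    // recorded. Similarly, vertex 5 discovers the triangle 5,6,7.
+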
+    /**
+     * Output a clique with vertex ids.
+     * 
+     * @param clique
+     *            the bitmap representation of a clique
+     */
+    private void generateClique(BitSet clique) {
+        for (int j = 0; j < clique.length();) {
+            j = clique.nextSetBit(j);
+            VLongWritable v = vertexList.get(j);
+            cliques.add(v);
+            j++;
+        }
+    }
+
+    /**
+     * Find cliques using depth-first search.
+     * 
+     * @param maxDepth
+     *            the maximum search depth
+     * @param cliqueSoFar
+     *            the clique built so far
+     * @param depthSoFar
+     *            the current search depth
+     * @param currentSource
+     *            the vertex to be added into the clique
+     */
+    private void searchClique(int maxDepth, BitSet cliqueSoFar, int depthSoFar, VLongWritable currentSource) {
+        if (depthSoFar > maxDepth) {
+            // update maximal clique info
+            updateMaximalClique(cliqueSoFar);
+            return;
+        }
+
+        AdjacencyListWritable adj = map.get(currentSource);
+        Iterator<VLongWritable> neighbors = adj.getNeighbors();
+        ++depthSoFar;
+        while (neighbors.hasNext()) {
+            VLongWritable neighbor = neighbors.next();
+            if (!isTested(neighbor, cliqueSoFar) && isClique(neighbor, cliqueSoFar)) {
+                //snapshot the clique
+                int cliqueLength = cliqueSoFar.length();
+                // expand the clique
+                cliqueSoFar.set(invertedMap.get(neighbor));
+                searchClique(maxDepth, cliqueSoFar, depthSoFar, neighbor);
+                // back to the snapshot clique
+                cliqueSoFar.set(cliqueLength, cliqueSoFar.length(), false);
+            }
+        }
+
+        // update maximal clique info
+        updateMaximalClique(cliqueSoFar);
+    }
+
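+    // Because isTested() only allows extensions by vertexes with a larger index,
+    // each clique is enumerated exactly once, in ascending-id order, with no
+    // duplicate-elimination pass needed afterwards.
+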
+    /**
+     * Update the maximal clique to a larger one if it exists
+     * 
+     * @param cliqueSoFar
+     *            the clique so far, in the bitmap representation
+     */
+    private void updateMaximalClique(BitSet cliqueSoFar) {
+        int cliqueSize = cliqueSoFar.cardinality();
+        if (cliqueSize > largestCliqueSizeSoFar) {
+            currentMaximalCliques.clear();
+            currentMaximalCliques.add((BitSet) cliqueSoFar.clone());
+            largestCliqueSizeSoFar = cliqueSize;
+        } else if (cliqueSize == largestCliqueSizeSoFar) {
+            currentMaximalCliques.add((BitSet) cliqueSoFar.clone());
+        }
+    }
+
+    /**
+     * Should we test the vertex newVertex?
+     * 
+     * @param newVertex
+     *            the vertex to be tested
+     * @param cliqueSoFar
+     *            the current clique, in the bitmap representation
+     * @return true if the new vertex has already been tested
+     */
+    private boolean isTested(VLongWritable newVertex, BitSet cliqueSoFar) {
+        int index = invertedMap.get(newVertex);
+        int largestSetIndex = cliqueSoFar.length() - 1;
+        if (index > largestSetIndex) {
+            // we only emit cliques with vertexes in ascending order;
+            // a new vertex whose index exceeds the largest set index is untested
+            return false;
+        } else {
+            // otherwise, we think the vertex is "tested"
+            return true;
+        }
+    }
+
+    /**
+     * Will adding the newVertex yield a bigger clique?
+     * 
+     * @param newVertex
+     *            the new vertex id
+     * @param cliqueSoFar
+     *            the bitmap representation of the clique
+     * @return true if adding the new vertex yields a bigger clique
+     */
+    private boolean isClique(VLongWritable newVertex, BitSet cliqueSoFar) {
+        AdjacencyListWritable adj = map.get(newVertex);
+        // check whether each existing vertex is in the neighbor set of newVertex
+        for (int i = 0; i < cliqueSoFar.length();) {
+            i = cliqueSoFar.nextSetBit(i);
+            VLongWritable v = vertexList.get(i);
+            if (!adj.isNeighbor(v)) {
+                return false;
+            }
+            i++;
+        }
+        return true;
+    }
+
+    /**
+     * In superstep 1, send the outgoing messages.
+     * In superstep 2, compute the maximal cliques.
+     * Otherwise, vote to halt.
+     */
+    @Override
+    public void compute(Iterator<AdjacencyListWritable> msgIterator) {
+        if (getSuperstep() == 1) {
+            sortEdges();
+            sendOutgoingMsgs(getEdges());
+        } else if (getSuperstep() == 2) {
+            updateCurrentMaximalCliques(msgIterator);
+        } else {
+            voteToHalt();
+        }
+    }
+
+    @Override
+    public String toString() {
+        return getVertexId() + " " + getVertexValue();
+    }
+
+    private static CliquesWritable readMaximalCliqueResult(Configuration conf) {
+        try {
+            CliquesWritable result = (CliquesWritable) IterationUtils.readGlobalAggregateValue(conf,
+                    BspUtils.getJobId(conf));
+            return result;
+        } catch (IOException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        PregelixJob job = new PregelixJob(MaximalCliqueVertex.class.getSimpleName());
+        job.setVertexClass(MaximalCliqueVertex.class);
+        job.setGlobalAggregatorClass(MaximalCliqueAggregator.class);
+        job.setDynamicVertexValueSize(true);
+        job.setVertexInputFormatClass(TextMaximalCliqueInputFormat.class);
+        job.setVertexOutputFormatClass(MaximalCliqueVertexOutputFormat.class);
+        Client.run(args, job);
+        System.out.println("maximal cliques: \n" + readMaximalCliqueResult(job.getConfiguration()));
+    }
+
+    /**
+     * Send the adjacency lists
+     * 
+     * @param edges
+     *            the outgoing edges
+     */
+    private void sendOutgoingMsgs(List<Edge<VLongWritable, NullWritable>> edges) {
+        for (int i = 0; i < edges.size(); i++) {
+            if (edges.get(i).getDestVertexId().get() < getVertexId().get()) {
+                // only emit to neighbors whose id is smaller than this vertex's id;
+                // this avoids a separate duplicate-removal step, because every
+                // resulting clique lists its vertexes in ascending order.
+                AdjacencyListWritable msg = new AdjacencyListWritable();
+                msg.setSource(getVertexId());
+                for (int j = i + 1; j < edges.size(); j++) {
+                    msg.addNeighbor(edges.get(j).getDestVertexId());
+                }
+                sendMsg(edges.get(i).getDestVertexId(), msg);
+            }
+        }
+    }
+
+    /**
+     * Maximal Clique VertexWriter
+     */
+    public static class MaximalCliqueVertexWriter extends
+            TextVertexWriter<VLongWritable, CliquesWritable, NullWritable> {
+        public MaximalCliqueVertexWriter(RecordWriter<Text, Text> lineRecordWriter) {
+            super(lineRecordWriter);
+        }
+
+        @Override
+        public void writeVertex(Vertex<VLongWritable, CliquesWritable, NullWritable, ?> vertex) throws IOException,
+                InterruptedException {
+            getRecordWriter().write(new Text(vertex.getVertexId().toString()),
+                    new Text(vertex.getVertexValue().toString()));
+        }
+    }
+
+    /**
+     * output format for maximal clique
+     */
+    public static class MaximalCliqueVertexOutputFormat extends
+            TextVertexOutputFormat<VLongWritable, CliquesWritable, NullWritable> {
+
+        @Override
+        public VertexWriter<VLongWritable, CliquesWritable, NullWritable> createVertexWriter(TaskAttemptContext context)
+                throws IOException, InterruptedException {
+            RecordWriter<Text, Text> recordWriter = textOutputFormat.getRecordWriter(context);
+            return new MaximalCliqueVertexWriter(recordWriter);
+        }
+
+    }
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/TextMaximalCliqueInputFormat.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/TextMaximalCliqueInputFormat.java
new file mode 100644
index 0000000..ec7b32c
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/TextMaximalCliqueInputFormat.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example.maximalclique;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexReader;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat.TextVertexReader;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+public class TextMaximalCliqueInputFormat extends
+        TextVertexInputFormat<VLongWritable, CliquesWritable, NullWritable, AdjacencyListWritable> {
+
+    @Override
+    public VertexReader<VLongWritable, CliquesWritable, NullWritable, AdjacencyListWritable> createVertexReader(
+            InputSplit split, TaskAttemptContext context) throws IOException {
+        return new TextMaximalCliqueGraphReader(textInputFormat.createRecordReader(split, context));
+    }
+}
+
+@SuppressWarnings("rawtypes")
+class TextMaximalCliqueGraphReader extends
+        TextVertexReader<VLongWritable, CliquesWritable, NullWritable, AdjacencyListWritable> {
+
+    private final static String separator = " ";
+    private Vertex vertex;
+    private VLongWritable vertexId = new VLongWritable();
+    private List<VLongWritable> pool = new ArrayList<VLongWritable>();
+    private int used = 0;
+
+    public TextMaximalCliqueGraphReader(RecordReader<LongWritable, Text> lineRecordReader) {
+        super(lineRecordReader);
+    }
+
+    @Override
+    public boolean nextVertex() throws IOException, InterruptedException {
+        return getRecordReader().nextKeyValue();
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public Vertex<VLongWritable, CliquesWritable, NullWritable, AdjacencyListWritable> getCurrentVertex()
+            throws IOException, InterruptedException {
+        used = 0;
+        if (vertex == null)
+            vertex = (Vertex) BspUtils.createVertex(getContext().getConfiguration());
+        vertex.getMsgList().clear();
+        vertex.getEdges().clear();
+
+        vertex.reset();
+        Text line = getRecordReader().getCurrentValue();
+        String[] fields = line.toString().split(separator);
+
+        if (fields.length > 0) {
+            /**
+             * set the src vertex id
+             */
+            long src = Long.parseLong(fields[0]);
+            vertexId.set(src);
+            vertex.setVertexId(vertexId);
+            long dest = -1L;
+
+            /**
+             * set up edges
+             */
+            for (int i = 1; i < fields.length; i++) {
+                dest = Long.parseLong(fields[i]);
+                VLongWritable destId = allocate();
+                destId.set(dest);
+                vertex.addEdge(destId, null);
+            }
+        }
+        return vertex;
+    }
+
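+    // allocate() reuses pooled VLongWritable instances across the edges of one
+    // input line; `used` is reset in getCurrentVertex(), so objects are recycled
+    // per record instead of allocated per edge.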
+    private VLongWritable allocate() {
+        if (used >= pool.size()) {
+            VLongWritable value = new VLongWritable();
+            pool.add(value);
+            used++;
+            return value;
+        } else {
+            VLongWritable value = pool.get(used);
+            used++;
+            return value;
+        }
+    }
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TextTriangleCountingInputFormat.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TextTriangleCountingInputFormat.java
new file mode 100644
index 0000000..bb399ff
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TextTriangleCountingInputFormat.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.example.trianglecounting;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexReader;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat.TextVertexReader;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+public class TextTriangleCountingInputFormat extends
+        TextVertexInputFormat<VLongWritable, VLongWritable, VLongWritable, VLongWritable> {
+
+    @Override
+    public VertexReader<VLongWritable, VLongWritable, VLongWritable, VLongWritable> createVertexReader(
+            InputSplit split, TaskAttemptContext context) throws IOException {
+        return new TextTriangleCountingGraphReader(textInputFormat.createRecordReader(split, context));
+    }
+}
+
+@SuppressWarnings("rawtypes")
+class TextTriangleCountingGraphReader extends TextVertexReader<VLongWritable, VLongWritable, VLongWritable, VLongWritable> {
+
+    private final static String separator = " ";
+    private Vertex vertex;
+    private VLongWritable vertexId = new VLongWritable();
+    private List<VLongWritable> pool = new ArrayList<VLongWritable>();
+    private int used = 0;
+
+    public TextTriangleCountingGraphReader(RecordReader<LongWritable, Text> lineRecordReader) {
+        super(lineRecordReader);
+    }
+
+    @Override
+    public boolean nextVertex() throws IOException, InterruptedException {
+        return getRecordReader().nextKeyValue();
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public Vertex<VLongWritable, VLongWritable, VLongWritable, VLongWritable> getCurrentVertex() throws IOException,
+            InterruptedException {
+        used = 0;
+        if (vertex == null)
+            vertex = (Vertex) BspUtils.createVertex(getContext().getConfiguration());
+        vertex.getMsgList().clear();
+        vertex.getEdges().clear();
+
+        vertex.reset();
+        Text line = getRecordReader().getCurrentValue();
+        String[] fields = line.toString().split(separator);
+
+        if (fields.length > 0) {
+            /**
+             * set the src vertex id
+             */
+            long src = Long.parseLong(fields[0]);
+            vertexId.set(src);
+            vertex.setVertexId(vertexId);
+            long dest = -1L;
+
+            /**
+             * set up edges
+             */
+            for (int i = 1; i < fields.length; i++) {
+                dest = Long.parseLong(fields[i]);
+                VLongWritable destId = allocate();
+                destId.set(dest);
+                vertex.addEdge(destId, null);
+            }
+        }
+        // vertex.sortEdges();
+        return vertex;
+    }
+
+    private VLongWritable allocate() {
+        if (used >= pool.size()) {
+            VLongWritable value = new VLongWritable();
+            pool.add(value);
+            used++;
+            return value;
+        } else {
+            VLongWritable value = pool.get(used);
+            used++;
+            return value;
+        }
+    }
+}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingAggregator.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingAggregator.java
new file mode 100644
index 0000000..67b028d
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingAggregator.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.pregelix.example.trianglecounting;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.pregelix.api.graph.GlobalAggregator;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/**
+ * The global aggregator aggregates the count of triangles
+ */
+public class TriangleCountingAggregator extends
+        GlobalAggregator<VLongWritable, VLongWritable, VLongWritable, VLongWritable, VLongWritable, VLongWritable> {
+
+    private VLongWritable state = new VLongWritable(0);
+
+    @Override
+    public void init() {
+        state.set(0);
+    }
+
+    @Override
+    public void step(Vertex<VLongWritable, VLongWritable, VLongWritable, VLongWritable> v) throws HyracksDataException {
+        state.set(state.get() + v.getVertexValue().get());
+    }
+
+    @Override
+    public void step(VLongWritable partialResult) {
+        state.set(state.get() + partialResult.get());
+    }
+
+    @Override
+    public VLongWritable finishPartial() {
+        return state;
+    }
+
+    @Override
+    public VLongWritable finishFinal() {
+        return state;
+    }
+
+}
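
The overrides above suggest the usual two-level aggregation contract: init() resets the state,
step(Vertex) folds in each local vertex, finishPartial() emits a per-partition subtotal, and
step(VLongWritable) plus finishFinal() combine those subtotals. A plain-Java sketch of that
combining step, with made-up partial counts:

    public class AggregationSketch {
        public static void main(String[] args) {
            long[] partialCounts = { 3L, 5L }; // hypothetical finishPartial() results of two partitions
            long global = 0;                   // mirrors init()
            for (long partial : partialCounts) {
                global += partial;             // mirrors step(VLongWritable partialResult)
            }
            System.out.println("global triangle count: " + global); // 8
        }
    }
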
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingVertex.java
new file mode 100644
index 0000000..d3db095
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingVertex.java
@@ -0,0 +1,153 @@
+package edu.uci.ics.pregelix.example.trianglecounting;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+import edu.uci.ics.pregelix.api.graph.Edge;
+import edu.uci.ics.pregelix.api.graph.Vertex;
+import edu.uci.ics.pregelix.api.io.VertexWriter;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat;
+import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat.TextVertexWriter;
+import edu.uci.ics.pregelix.api.job.PregelixJob;
+import edu.uci.ics.pregelix.api.util.BspUtils;
+import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
+import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.io.VLongWritable;
+
+/**
+ * The triangle counting example -- counting the triangles in an undirected graph.
+ */
+public class TriangleCountingVertex extends Vertex<VLongWritable, VLongWritable, VLongWritable, VLongWritable> {
+
+    private VLongWritable tmpValue = new VLongWritable(0);
+    private long triangleCount = 0;
+    private Edge<VLongWritable, VLongWritable> candidateEdge = new Edge<VLongWritable, VLongWritable>(
+            new VLongWritable(0), new VLongWritable(0));
+    private EdgeComparator edgeComparator = new EdgeComparator();
+
+    @Override
+    public void compute(Iterator<VLongWritable> msgIterator) {
+        // superstep 1: sort the edge list so that later membership tests can use binary search
+        if (getSuperstep() == 1) {
+            // sorting edges could be avoided if the dataset already has that property
+            sortEdges();
+            List<Edge<VLongWritable, VLongWritable>> edges = this.getEdges();
+            int numEdges = edges.size();
+
+            //decode the vertex id and its neighbor ids into primitive longs
+            long src = getVertexId().get();
+            long[] dests = new long[numEdges];
+            for (int i = 0; i < numEdges; i++) {
+                dests[i] = edges.get(i).getDestVertexId().get();
+            }
+
+            //send messages -- exploiting the fact that the vertices of each
+            //discovered triangle are ordered by id (each is counted once, at its smallest vertex)
+            for (int i = 0; i < numEdges; i++) {
+                if (dests[i] < src) {
+                    for (int j = i + 1; j < numEdges; j++) {
+                        //send messages -- v_j.id > v_i.id -- guaranteed by sortEdges()
+                        if (dests[j] > src) {
+                            sendMsg(edges.get(i).getDestVertexId(), edges.get(j).getDestVertexId());
+                        }
+                    }
+                }
+            }
+        }
+        if (getSuperstep() >= 2) {
+            triangleCount = 0;
+            List<Edge<VLongWritable, VLongWritable>> edges = this.getEdges();
+            while (msgIterator.hasNext()) {
+                VLongWritable msg = msgIterator.next();
+                candidateEdge.setDestVertexId(msg);
+                if (Collections.binarySearch(edges, candidateEdge, edgeComparator) >= 0) {
+                    // the msg value is also a destination of this vertex, so a triangle closes
+                    triangleCount++;
+                }
+            }
+
+            // set vertex value
+            tmpValue.set(triangleCount);
+            setVertexValue(tmpValue);
+            voteToHalt();
+        }
+    }
+
+    /**
+     * Triangle Counting VertexWriter
+     */
+    public static class TriangleCountingVertexWriter extends
+            TextVertexWriter<VLongWritable, VLongWritable, VLongWritable> {
+        public TriangleCountingVertexWriter(RecordWriter<Text, Text> lineRecordWriter) {
+            super(lineRecordWriter);
+        }
+
+        @Override
+        public void writeVertex(Vertex<VLongWritable, VLongWritable, VLongWritable, ?> vertex) throws IOException,
+                InterruptedException {
+            getRecordWriter().write(new Text(vertex.getVertexId().toString()),
+                    new Text(vertex.getVertexValue().toString()));
+        }
+    }
+
+    @Override
+    public String toString() {
+        return getVertexId() + " " + getVertexValue();
+    }
+
+    /**
+     * Output format for triangle counting.
+     */
+    public static class TriangleCountingVertexOutputFormat extends
+            TextVertexOutputFormat<VLongWritable, VLongWritable, VLongWritable> {
+
+        @Override
+        public VertexWriter<VLongWritable, VLongWritable, VLongWritable> createVertexWriter(TaskAttemptContext context)
+                throws IOException, InterruptedException {
+            RecordWriter<Text, Text> recordWriter = textOutputFormat.getRecordWriter(context);
+            return new TriangleCountingVertexWriter(recordWriter);
+        }
+
+    }
+
+    private static long readTriangleCountingResult(Configuration conf) {
+        try {
+            VLongWritable count = (VLongWritable) IterationUtils
+                    .readGlobalAggregateValue(conf, BspUtils.getJobId(conf));
+            return count.get();
+        } catch (IOException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        PregelixJob job = new PregelixJob(TriangleCountingVertex.class.getSimpleName());
+        job.setVertexClass(TriangleCountingVertex.class);
+        job.setGlobalAggregatorClass(TriangleCountingAggregator.class);
+        job.setVertexInputFormatClass(TextTriangleCountingInputFormat.class);
+        job.setVertexOutputFormatClass(TriangleCountingVertexOutputFormat.class);
+        Client.run(args, job);
+        System.out.println("triangle count: " + readTriangleCountingResult(job.getConfiguration()));
+    }
+}
+
+/**
+ * The comparator for Edge<VLongWritable, VLongWritable>.
+ */
+class EdgeComparator implements Comparator<Edge<VLongWritable, VLongWritable>> {
+
+    @Override
+    public int compare(Edge<VLongWritable, VLongWritable> left, Edge<VLongWritable, VLongWritable> right) {
+        long leftValue = left.getDestVertexId().get();
+        long rightValue = right.getDestVertexId().get();
+        return leftValue > rightValue ? 1 : (leftValue < rightValue ? -1 : 0);
+    }
+}
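
A standalone walk-through (plain Java, hypothetical data) of the two-superstep protocol above,
on the triangle {1,2,3}: in superstep 1 the middle vertex 2 sends the id 3 to vertex 1 because
1 < 2 < 3; in superstep 2 vertex 1 binary-searches 3 in its sorted edge list, and a hit closes
the triangle, counting it exactly once:

    import java.util.Arrays;

    public class TriangleLookupSketch {
        public static void main(String[] args) {
            long[] sortedNeighborsOf1 = { 2, 3 }; // hypothetical sorted edge list of vertex 1
            long msgFromVertex2 = 3;              // sent in superstep 1 since 1 < 2 < 3
            boolean closed = Arrays.binarySearch(sortedNeighborsOf1, msgFromVertex2) >= 0;
            System.out.println("triangle 1-2-3 found: " + closed); // true
        }
    }
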
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/utils/VertexAggregator.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/utils/VertexAggregator.java
new file mode 100644
index 0000000..d8f704e
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/utils/VertexAggregator.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example.utils;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.TextInputFormat;
+
+@SuppressWarnings("deprecation")
+public class VertexAggregator {
+
+    public static class MapRecordOnly extends MapReduceBase implements
+            Mapper<LongWritable, Text, NullWritable, LongWritable> {
+        private final NullWritable nullValue = NullWritable.get();
+        private final LongWritable count = new LongWritable(1);
+
+        public void map(LongWritable id, Text inputValue, OutputCollector<NullWritable, LongWritable> output,
+                Reporter reporter) throws IOException {
+            output.collect(nullValue, count);
+        }
+    }
+
+    public static class CombineRecordOnly extends MapReduceBase implements
+            Reducer<NullWritable, LongWritable, NullWritable, LongWritable> {
+        private final NullWritable nullValue = NullWritable.get();
+
+        public void reduce(NullWritable inputKey, Iterator<LongWritable> inputValue,
+                OutputCollector<NullWritable, LongWritable> output, Reporter reporter) throws IOException {
+            long count = 0;
+            while (inputValue.hasNext())
+                count += inputValue.next().get();
+            output.collect(nullValue, new LongWritable(count));
+        }
+    }
+
+    public static class ReduceRecordOnly extends MapReduceBase implements
+            Reducer<NullWritable, LongWritable, NullWritable, Text> {
+        private final NullWritable nullValue = NullWritable.get();
+
+        public void reduce(NullWritable inputKey, Iterator<LongWritable> inputValue,
+                OutputCollector<NullWritable, Text> output, Reporter reporter) throws IOException {
+            long count = 0;
+            while (inputValue.hasNext())
+                count += inputValue.next().get();
+            output.collect(nullValue, new Text(Long.toString(count)));
+        }
+    }
+
+    public static void main(String[] args) throws IOException {
+        JobConf job = new JobConf(VertexAggregator.class);
+
+        job.setJobName(VertexAggregator.class.getSimpleName());
+        job.setMapperClass(MapRecordOnly.class);
+        job.setCombinerClass(CombineRecordOnly.class);
+        job.setReducerClass(ReduceRecordOnly.class);
+        job.setMapOutputKeyClass(NullWritable.class);
+        job.setMapOutputValueClass(LongWritable.class);
+
+        job.setInputFormat(TextInputFormat.class);
+        FileInputFormat.setInputPaths(job, args[0]);
+        FileOutputFormat.setOutputPath(job, new Path(args[1]));
+        job.setNumReduceTasks(Integer.parseInt(args[2]));
+        JobClient.runJob(job);
+    }
+}
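
Since the map phase emits one (null, 1) pair per input line and the combiner/reducer sum them,
the job's single output value is the number of adjacency lines, i.e. the vertex count. A
hypothetical invocation (jar name and paths are illustrative, not from this commit), with one
reducer so the total lands in a single part file:

    hadoop jar pregelix-example.jar edu.uci.ics.pregelix.example.utils.VertexAggregator /webmap /vertexcount 1
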
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/utils/VertexSorter.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/utils/VertexSorter.java
new file mode 100644
index 0000000..8421088
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/utils/VertexSorter.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.example.utils;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.TextInputFormat;
+
+@SuppressWarnings("deprecation")
+public class VertexSorter {
+    public static class MapRecordOnly extends MapReduceBase implements Mapper<LongWritable, Text, LongWritable, Text> {
+        private static String separator = " ";
+
+        public void map(LongWritable id, Text inputValue, OutputCollector<LongWritable, Text> output, Reporter reporter)
+                throws IOException {
+            String[] fields = inputValue.toString().split(separator);
+            LongWritable vertexId = new LongWritable(Long.parseLong(fields[0]));
+            output.collect(vertexId, inputValue);
+        }
+    }
+
+    public static class ReduceRecordOnly extends MapReduceBase implements
+            Reducer<LongWritable, Text, NullWritable, Text> {
+
+        NullWritable key = NullWritable.get();
+
+        public void reduce(LongWritable inputKey, Iterator<Text> inputValue,
+                OutputCollector<NullWritable, Text> output, Reporter reporter) throws IOException {
+            while (inputValue.hasNext())
+                output.collect(key, inputValue.next());
+        }
+    }
+
+    public static void main(String[] args) throws IOException {
+        JobConf job = new JobConf(VertexSorter.class);
+
+        job.setJobName(VertexSorter.class.getSimpleName());
+        job.setMapperClass(MapRecordOnly.class);
+        job.setReducerClass(ReduceRecordOnly.class);
+        job.setMapOutputKeyClass(LongWritable.class);
+        job.setMapOutputValueClass(Text.class);
+
+        job.setInputFormat(TextInputFormat.class);
+        FileInputFormat.setInputPaths(job, args[0]);
+        FileOutputFormat.setOutputPath(job, new Path(args[1]));
+        job.setNumReduceTasks(Integer.parseInt(args[2]));
+        JobClient.runJob(job);
+    }
+}
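
VertexSorter leans on the MapReduce shuffle to order records by vertex id; with more than one
reducer the default hash partitioning sorts only within each part file, so a globally sorted
result needs a single reducer (or a total-order partitioner). A hypothetical single-reducer
invocation, paths illustrative:

    hadoop jar pregelix-example.jar edu.uci.ics.pregelix.example.utils.VertexSorter /webmap-unsorted /webmap-sorted 1
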
diff --git a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/dataload/DataLoadTest.java b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/dataload/DataLoadTest.java
index 7787347..321b5b2 100644
--- a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/dataload/DataLoadTest.java
+++ b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/dataload/DataLoadTest.java
@@ -37,7 +37,7 @@
 import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
 import edu.uci.ics.pregelix.core.util.PregelixHyracksIntegrationUtil;
 import edu.uci.ics.pregelix.example.PageRankVertex;
-import edu.uci.ics.pregelix.example.PageRankVertex.SimplePageRankVertexInputFormat;
+import edu.uci.ics.pregelix.example.PageRankVertex.SimulatedPageRankVertexInputFormat;
 import edu.uci.ics.pregelix.example.util.TestUtils;
 
 @SuppressWarnings("deprecation")
@@ -65,7 +65,7 @@
     public DataLoadTest() throws Exception {
         job = new PregelixJob(GIRAPH_JOB_NAME);
         job.setVertexClass(PageRankVertex.class);
-        job.setVertexInputFormatClass(SimplePageRankVertexInputFormat.class);
+        job.setVertexInputFormatClass(SimulatedPageRankVertexInputFormat.class);
         job.getConfiguration().setClass(PregelixJob.VERTEX_INDEX_CLASS, LongWritable.class, WritableComparable.class);
         job.getConfiguration().setClass(PregelixJob.VERTEX_VALUE_CLASS, DoubleWritable.class, Writable.class);
         job.getConfiguration().setClass(PregelixJob.EDGE_VALUE_CLASS, FloatWritable.class, Writable.class);
@@ -76,7 +76,7 @@
         ClusterConfig.setStorePath(PATH_TO_CLUSTER_STORE);
         ClusterConfig.setClusterPropertiesPath(PATH_TO_CLUSTER_PROPERTIES);
         cleanupStores();
-        PregelixHyracksIntegrationUtil.init();
+        PregelixHyracksIntegrationUtil.init("src/test/resources/topology.xml");
         PregelixHyracksIntegrationUtil.createApp(HYRACKS_APP_NAME);
         LOGGER.info("Hyracks mini-cluster started");
         startHDFS();
diff --git a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobgen/JobGenerator.java b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobgen/JobGenerator.java
index c0b4a10..ca5a1c4 100644
--- a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobgen/JobGenerator.java
+++ b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobgen/JobGenerator.java
@@ -25,9 +25,11 @@
 import edu.uci.ics.pregelix.api.job.PregelixJob;
 import edu.uci.ics.pregelix.example.ConnectedComponentsVertex;
 import edu.uci.ics.pregelix.example.ConnectedComponentsVertex.SimpleConnectedComponentsVertexOutputFormat;
+import edu.uci.ics.pregelix.example.GraphMutationVertex;
+import edu.uci.ics.pregelix.example.GraphMutationVertex.SimpleGraphMutationVertexOutputFormat;
 import edu.uci.ics.pregelix.example.PageRankVertex;
-import edu.uci.ics.pregelix.example.PageRankVertex.SimplePageRankVertexInputFormat;
 import edu.uci.ics.pregelix.example.PageRankVertex.SimplePageRankVertexOutputFormat;
+import edu.uci.ics.pregelix.example.PageRankVertex.SimulatedPageRankVertexInputFormat;
 import edu.uci.ics.pregelix.example.ReachabilityVertex;
 import edu.uci.ics.pregelix.example.ReachabilityVertex.SimpleReachibilityVertexOutputFormat;
 import edu.uci.ics.pregelix.example.ShortestPathsVertex;
@@ -35,6 +37,14 @@
 import edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat;
 import edu.uci.ics.pregelix.example.inputformat.TextReachibilityVertexInputFormat;
 import edu.uci.ics.pregelix.example.inputformat.TextShortestPathsInputFormat;
+import edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueAggregator;
+import edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueVertex;
+import edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueVertex.MaximalCliqueVertexOutputFormat;
+import edu.uci.ics.pregelix.example.maximalclique.TextMaximalCliqueInputFormat;
+import edu.uci.ics.pregelix.example.trianglecounting.TextTriangleCountingInputFormat;
+import edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingAggregator;
+import edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingVertex;
+import edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingVertex.TriangleCountingVertexOutputFormat;
 
 public class JobGenerator {
     private static String outputBase = "src/test/resources/jobs/";
@@ -44,6 +54,9 @@
     private static String HDFS_INPUTPATH2 = "/webmapcomplex";
     private static String HDFS_OUTPUTPAH2 = "/resultcomplex";
 
+    private static String HDFS_INPUTPATH3 = "/clique";
+    private static String HDFS_OUTPUTPAH3 = "/resultclique";
+
     private static void generatePageRankJobReal(String jobName, String outputPath) throws IOException {
         PregelixJob job = new PregelixJob(jobName);
         job.setVertexClass(PageRankVertex.class);
@@ -148,7 +161,7 @@
     private static void generatePageRankJob(String jobName, String outputPath) throws IOException {
         PregelixJob job = new PregelixJob(jobName);
         job.setVertexClass(PageRankVertex.class);
-        job.setVertexInputFormatClass(SimplePageRankVertexInputFormat.class);
+        job.setVertexInputFormatClass(SimulatedPageRankVertexInputFormat.class);
         job.setMessageCombinerClass(PageRankVertex.SimpleSumCombiner.class);
         job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
         FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
@@ -157,26 +170,74 @@
         job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
     }
 
+    private static void generateShortestPathJob(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(ShortestPathsVertex.class);
+        job.setVertexInputFormatClass(SimulatedPageRankVertexInputFormat.class);
+        job.setMessageCombinerClass(ShortestPathsVertex.SimpleMinCombiner.class);
+        job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
+        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
+        job.getConfiguration().setLong(ShortestPathsVertex.SOURCE_ID, 0);
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generatePageRankJobRealDynamic(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(PageRankVertex.class);
+        job.setVertexInputFormatClass(TextPageRankInputFormat.class);
+        job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
+        job.setMessageCombinerClass(PageRankVertex.SimpleSumCombiner.class);
+        job.setDynamicVertexValueSize(true);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
+        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generateTriangleCountingJob(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(TriangleCountingVertex.class);
+        job.setGlobalAggregatorClass(TriangleCountingAggregator.class);
+        job.setVertexInputFormatClass(TextTriangleCountingInputFormat.class);
+        job.setVertexOutputFormatClass(TriangleCountingVertexOutputFormat.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH3);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH3));
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generateMaximalCliqueJob(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(MaximalCliqueVertex.class);
+        job.setGlobalAggregatorClass(MaximalCliqueAggregator.class);
+        job.setDynamicVertexValueSize(true);
+        job.setVertexInputFormatClass(TextMaximalCliqueInputFormat.class);
+        job.setVertexOutputFormatClass(MaximalCliqueVertexOutputFormat.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH3);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH3));
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
+    private static void generateGraphMutationJob(String jobName, String outputPath) throws IOException {
+        PregelixJob job = new PregelixJob(jobName);
+        job.setVertexClass(GraphMutationVertex.class);
+        job.setVertexInputFormatClass(TextPageRankInputFormat.class);
+        job.setVertexOutputFormatClass(SimpleGraphMutationVertexOutputFormat.class);
+        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
+        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
+        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
+        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+    }
+
     private static void genPageRank() throws IOException {
         generatePageRankJob("PageRank", outputBase + "PageRank.xml");
         generatePageRankJobReal("PageRank", outputBase + "PageRankReal.xml");
+        generatePageRankJobRealDynamic("PageRank", outputBase + "PageRankRealDynamic.xml");
         generatePageRankJobRealComplex("PageRank", outputBase + "PageRankRealComplex.xml");
         generatePageRankJobRealNoCombiner("PageRank", outputBase + "PageRankRealNoCombiner.xml");
     }
 
-    private static void generateShortestPathJob(String jobName, String outputPath) throws IOException {
-        PregelixJob job = new PregelixJob(jobName);
-        job.setVertexClass(ShortestPathsVertex.class);
-        job.setVertexInputFormatClass(SimplePageRankVertexInputFormat.class);
-        job.setMessageCombinerClass(ShortestPathsVertex.SimpleMinCombiner.class);
-        job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
-        FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
-        FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
-        job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
-        job.getConfiguration().setLong(ShortestPathsVertex.SOURCE_ID, 0);
-        job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
-    }
-
     private static void genShortestPath() throws IOException {
         generateShortestPathJob("ShortestPaths", outputBase + "ShortestPaths.xml");
         generateShortestPathJobReal("ShortestPaths", outputBase + "ShortestPathsReal.xml");
@@ -194,11 +255,25 @@
                 + "ReachibilityRealComplexNoConnectivity.xml");
     }
 
+    private static void genTriangleCounting() throws IOException {
+        generateTriangleCountingJob("Triangle Counting", outputBase + "TriangleCounting.xml");
+    }
+
+    private static void genMaximalClique() throws IOException {
+        generateMaximalCliqueJob("Maximal Clique", outputBase + "MaximalClique.xml");
+    }
+
+    private static void genGraphMutation() throws IOException {
+        generateGraphMutationJob("Graph Mutation", outputBase + "GraphMutation.xml");
+    }
+
     public static void main(String[] args) throws IOException {
         genPageRank();
         genShortestPath();
         genConnectedComponents();
         genReachibility();
+        genTriangleCounting();
+        genMaximalClique();
+        genGraphMutation();
     }
-
 }
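
Each generate*Job method above follows the same pattern: configure a PregelixJob, then
serialize its Hadoop Configuration to an XML file that the test harness later reloads. A
minimal round-trip sketch of that pattern using only the standard Hadoop Configuration API
(the property key is made up):

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    public class ConfRoundTripSketch {
        public static void main(String[] args) throws IOException {
            Configuration conf = new Configuration();
            conf.setLong("example.num.vertices", 20); // hypothetical property
            File file = new File("Example.xml");
            OutputStream out = new FileOutputStream(file);
            conf.writeXml(out); // the same call the generators use
            out.close();
            Configuration reloaded = new Configuration(false);
            reloaded.addResource(new Path(file.getAbsolutePath()));
            System.out.println(reloaded.getLong("example.num.vertices", -1)); // 20
        }
    }
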
diff --git a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestCase.java b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestCase.java
index 89bce34..5a556fa 100644
--- a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestCase.java
+++ b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestCase.java
@@ -30,6 +30,7 @@
 import edu.uci.ics.pregelix.core.jobgen.JobGen;
 import edu.uci.ics.pregelix.core.jobgen.JobGenInnerJoin;
 import edu.uci.ics.pregelix.core.jobgen.JobGenOuterJoin;
+import edu.uci.ics.pregelix.core.jobgen.JobGenOuterJoinSingleSort;
 import edu.uci.ics.pregelix.core.jobgen.JobGenOuterJoinSort;
 import edu.uci.ics.pregelix.core.util.PregelixHyracksIntegrationUtil;
 import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
@@ -37,13 +38,16 @@
 
 public class RunJobTestCase extends TestCase {
     private static final String NC1 = "nc1";
-    private static final String HYRACKS_APP_NAME = "giraph";
+    private static final String HYRACKS_APP_NAME = "pregelix";
     private static String HDFS_INPUTPATH = "/webmap";
     private static String HDFS_OUTPUTPAH = "/result";
 
     private static String HDFS_INPUTPATH2 = "/webmapcomplex";
     private static String HDFS_OUTPUTPAH2 = "/resultcomplex";
 
+    private static String HDFS_INPUTPATH3 = "/clique";
+    private static String HDFS_OUTPUTPAH3 = "/resultclique";
+
     private final PregelixJob job;
     private JobGen[] giraphJobGens;
     private final String resultFileName;
@@ -61,21 +65,24 @@
         if (inputPaths[0].toString().endsWith(HDFS_INPUTPATH)) {
             FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
             FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
-        } else {
+        } else if (inputPaths[0].toString().endsWith(HDFS_INPUTPATH2)) {
             FileInputFormat.setInputPaths(job, HDFS_INPUTPATH2);
             FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH2));
+        } else {
+            FileInputFormat.setInputPaths(job, HDFS_INPUTPATH3);
+            FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH3));
         }
         job.setJobName(jobName);
         this.resultFileName = resultFile;
         this.expectedFileName = expectedFile;
-        giraphJobGens = new JobGen[3];
+        giraphJobGens = new JobGen[4];
         giraphJobGens[0] = new JobGenOuterJoin(job);
         waitawhile();
         giraphJobGens[1] = new JobGenInnerJoin(job);
         waitawhile();
         giraphJobGens[2] = new JobGenOuterJoinSort(job);
-        //waitawhile();
-        // giraphJobGens[3] = new JobGenOuterJoinSingleSort(job);
+        waitawhile();
+        giraphJobGens[3] = new JobGenOuterJoinSingleSort(job);
     }
 
     private void waitawhile() throws InterruptedException {
diff --git a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestSuite.java b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestSuite.java
index 8a5c34b..fa98ebd 100644
--- a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestSuite.java
+++ b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestSuite.java
@@ -41,160 +41,176 @@
 
 @SuppressWarnings("deprecation")
 public class RunJobTestSuite extends TestSuite {
-    private static final Logger LOGGER = Logger.getLogger(RunJobTestSuite.class.getName());
+	private static final Logger LOGGER = Logger.getLogger(RunJobTestSuite.class
+			.getName());
 
-    private static final String ACTUAL_RESULT_DIR = "actual";
-    private static final String EXPECTED_RESULT_DIR = "src/test/resources/expected";
-    private static final String PATH_TO_HADOOP_CONF = "src/test/resources/hadoop/conf";
-    private static final String PATH_TO_CLUSTER_STORE = "src/test/resources/cluster/stores.properties";
-    private static final String PATH_TO_CLUSTER_PROPERTIES = "src/test/resources/cluster/cluster.properties";
-    private static final String PATH_TO_JOBS = "src/test/resources/jobs/";
-    private static final String PATH_TO_IGNORE = "src/test/resources/ignore.txt";
-    private static final String PATH_TO_ONLY = "src/test/resources/only.txt";
-    private static final String FILE_EXTENSION_OF_RESULTS = "result";
+	private static final String ACTUAL_RESULT_DIR = "actual";
+	private static final String EXPECTED_RESULT_DIR = "src/test/resources/expected";
+	private static final String PATH_TO_HADOOP_CONF = "src/test/resources/hadoop/conf";
+	private static final String PATH_TO_CLUSTER_STORE = "src/test/resources/cluster/stores.properties";
+	private static final String PATH_TO_CLUSTER_PROPERTIES = "src/test/resources/cluster/cluster.properties";
+	private static final String PATH_TO_JOBS = "src/test/resources/jobs/";
+	private static final String PATH_TO_IGNORE = "src/test/resources/ignore.txt";
+	private static final String PATH_TO_ONLY = "src/test/resources/only.txt";
+	private static final String FILE_EXTENSION_OF_RESULTS = "result";
 
-    private static final String DATA_PATH = "data/webmap/webmap_link.txt";
-    private static final String HDFS_PATH = "/webmap/";
+	private static final String DATA_PATH = "data/webmap/webmap_link.txt";
+	private static final String HDFS_PATH = "/webmap/";
 
-    private static final String DATA_PATH2 = "data/webmapcomplex/webmap_link.txt";
-    private static final String HDFS_PATH2 = "/webmapcomplex/";
+	private static final String DATA_PATH2 = "data/webmapcomplex/webmap_link.txt";
+	private static final String HDFS_PATH2 = "/webmapcomplex/";
 
-    private static final String HYRACKS_APP_NAME = "giraph";
-    private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR + File.separator + "conf.xml";
-    private MiniDFSCluster dfsCluster;
+	private static final String DATA_PATH3 = "data/clique/clique.txt";
+	private static final String HDFS_PATH3 = "/clique/";
 
-    private JobConf conf = new JobConf();
-    private int numberOfNC = 2;
+	private static final String HYRACKS_APP_NAME = "pregelix";
+	private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR
+			+ File.separator + "conf.xml";
+	private MiniDFSCluster dfsCluster;
 
-    public void setUp() throws Exception {
-        ClusterConfig.setStorePath(PATH_TO_CLUSTER_STORE);
-        ClusterConfig.setClusterPropertiesPath(PATH_TO_CLUSTER_PROPERTIES);
-        cleanupStores();
-        PregelixHyracksIntegrationUtil.init();
-        PregelixHyracksIntegrationUtil.createApp(HYRACKS_APP_NAME);
-        LOGGER.info("Hyracks mini-cluster started");
-        FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
-        FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
-        startHDFS();
-    }
+	private JobConf conf = new JobConf();
+	private int numberOfNC = 2;
 
-    private void cleanupStores() throws IOException {
-        FileUtils.forceMkdir(new File("teststore"));
-        FileUtils.forceMkdir(new File("build"));
-        FileUtils.cleanDirectory(new File("teststore"));
-        FileUtils.cleanDirectory(new File("build"));
-    }
+	public void setUp() throws Exception {
+		ClusterConfig.setStorePath(PATH_TO_CLUSTER_STORE);
+		ClusterConfig.setClusterPropertiesPath(PATH_TO_CLUSTER_PROPERTIES);
+		cleanupStores();
+		PregelixHyracksIntegrationUtil.init("src/test/resources/topology.xml");
+		PregelixHyracksIntegrationUtil.createApp(HYRACKS_APP_NAME);
+		LOGGER.info("Hyracks mini-cluster started");
+		FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
+		FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
+		startHDFS();
+	}
 
-    private void startHDFS() throws IOException {
-        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
-        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
-        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
-        FileSystem lfs = FileSystem.getLocal(new Configuration());
-        lfs.delete(new Path("build"), true);
-        System.setProperty("hadoop.log.dir", "logs");
-        dfsCluster = new MiniDFSCluster(conf, numberOfNC, true, null);
-        FileSystem dfs = FileSystem.get(conf);
-        Path src = new Path(DATA_PATH);
-        Path dest = new Path(HDFS_PATH);
-        dfs.mkdirs(dest);
-        dfs.copyFromLocalFile(src, dest);
+	private void cleanupStores() throws IOException {
+		FileUtils.forceMkdir(new File("teststore"));
+		FileUtils.forceMkdir(new File("build"));
+		FileUtils.cleanDirectory(new File("teststore"));
+		FileUtils.cleanDirectory(new File("build"));
+	}
 
-        src = new Path(DATA_PATH2);
-        dest = new Path(HDFS_PATH2);
-        dfs.mkdirs(dest);
-        dfs.copyFromLocalFile(src, dest);
+	private void startHDFS() throws IOException {
+		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
+		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
+		conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
+		FileSystem lfs = FileSystem.getLocal(new Configuration());
+		lfs.delete(new Path("build"), true);
+		System.setProperty("hadoop.log.dir", "logs");
+		dfsCluster = new MiniDFSCluster(conf, numberOfNC, true, null);
+		FileSystem dfs = FileSystem.get(conf);
+		Path src = new Path(DATA_PATH);
+		Path dest = new Path(HDFS_PATH);
+		dfs.mkdirs(dest);
+		dfs.copyFromLocalFile(src, dest);
 
-        DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
-        conf.writeXml(confOutput);
-        confOutput.flush();
-        confOutput.close();
-    }
+		src = new Path(DATA_PATH2);
+		dest = new Path(HDFS_PATH2);
+		dfs.mkdirs(dest);
+		dfs.copyFromLocalFile(src, dest);
 
-    /**
-     * cleanup hdfs cluster
-     */
-    private void cleanupHDFS() throws Exception {
-        dfsCluster.shutdown();
-    }
+		src = new Path(DATA_PATH3);
+		dest = new Path(HDFS_PATH3);
+		dfs.mkdirs(dest);
+		dfs.copyFromLocalFile(src, dest);
 
-    public void tearDown() throws Exception {
-        PregelixHyracksIntegrationUtil.destroyApp(HYRACKS_APP_NAME);
-        PregelixHyracksIntegrationUtil.deinit();
-        LOGGER.info("Hyracks mini-cluster shut down");
-        cleanupHDFS();
-    }
+		DataOutputStream confOutput = new DataOutputStream(
+				new FileOutputStream(new File(HADOOP_CONF_PATH)));
+		conf.writeXml(confOutput);
+		confOutput.flush();
+		confOutput.close();
+	}
 
-    public static Test suite() throws Exception {
-        List<String> ignores = getFileList(PATH_TO_IGNORE);
-        List<String> onlys = getFileList(PATH_TO_ONLY);
-        File testData = new File(PATH_TO_JOBS);
-        File[] queries = testData.listFiles();
-        RunJobTestSuite testSuite = new RunJobTestSuite();
-        testSuite.setUp();
-        boolean onlyEnabled = false;
+	/**
+	 * cleanup hdfs cluster
+	 */
+	private void cleanupHDFS() throws Exception {
+		dfsCluster.shutdown();
+	}
 
-        if (onlys.size() > 0) {
-            onlyEnabled = true;
-        }
-        for (File qFile : queries) {
-            if (isInList(ignores, qFile.getName()))
-                continue;
+	public void tearDown() throws Exception {
+		PregelixHyracksIntegrationUtil.destroyApp(HYRACKS_APP_NAME);
+		PregelixHyracksIntegrationUtil.deinit();
+		LOGGER.info("Hyracks mini-cluster shut down");
+		cleanupHDFS();
+	}
 
-            if (qFile.isFile()) {
-                if (onlyEnabled && !isInList(onlys, qFile.getName())) {
-                    continue;
-                } else {
-                    String resultFileName = ACTUAL_RESULT_DIR + File.separator + jobExtToResExt(qFile.getName());
-                    String expectedFileName = EXPECTED_RESULT_DIR + File.separator + jobExtToResExt(qFile.getName());
-                    testSuite.addTest(new RunJobTestCase(HADOOP_CONF_PATH, qFile.getName(), qFile.getAbsolutePath()
-                            .toString(), resultFileName, expectedFileName));
-                }
-            }
-        }
-        return testSuite;
-    }
+	public static Test suite() throws Exception {
+		List<String> ignores = getFileList(PATH_TO_IGNORE);
+		List<String> onlys = getFileList(PATH_TO_ONLY);
+		File testData = new File(PATH_TO_JOBS);
+		File[] queries = testData.listFiles();
+		RunJobTestSuite testSuite = new RunJobTestSuite();
+		testSuite.setUp();
+		boolean onlyEnabled = false;
 
-    /**
-     * Runs the tests and collects their result in a TestResult.
-     */
-    @Override
-    public void run(TestResult result) {
-        try {
-            int testCount = countTestCases();
-            for (int i = 0; i < testCount; i++) {
-                // cleanupStores();
-                Test each = this.testAt(i);
-                if (result.shouldStop())
-                    break;
-                runTest(each, result);
-            }
-            tearDown();
-        } catch (Exception e) {
-            throw new IllegalStateException(e);
-        }
-    }
+		if (onlys.size() > 0) {
+			onlyEnabled = true;
+		}
+		for (File qFile : queries) {
+			if (isInList(ignores, qFile.getName()))
+				continue;
 
-    protected static List<String> getFileList(String ignorePath) throws FileNotFoundException, IOException {
-        BufferedReader reader = new BufferedReader(new FileReader(ignorePath));
-        String s = null;
-        List<String> ignores = new ArrayList<String>();
-        while ((s = reader.readLine()) != null) {
-            ignores.add(s);
-        }
-        reader.close();
-        return ignores;
-    }
+			if (qFile.isFile()) {
+				if (onlyEnabled && !isInList(onlys, qFile.getName())) {
+					continue;
+				} else {
+					String resultFileName = ACTUAL_RESULT_DIR + File.separator
+							+ jobExtToResExt(qFile.getName());
+					String expectedFileName = EXPECTED_RESULT_DIR
+							+ File.separator + jobExtToResExt(qFile.getName());
+					testSuite.addTest(new RunJobTestCase(HADOOP_CONF_PATH,
+							qFile.getName(),
+							qFile.getAbsolutePath().toString(), resultFileName,
+							expectedFileName));
+				}
+			}
+		}
+		return testSuite;
+	}
 
-    private static String jobExtToResExt(String fname) {
-        int dot = fname.lastIndexOf('.');
-        return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
-    }
+	/**
+	 * Runs the tests and collects their result in a TestResult.
+	 */
+	@Override
+	public void run(TestResult result) {
+		try {
+			int testCount = countTestCases();
+			for (int i = 0; i < testCount; i++) {
+				// cleanupStores();
+				Test each = this.testAt(i);
+				if (result.shouldStop())
+					break;
+				runTest(each, result);
+			}
+			tearDown();
+		} catch (Exception e) {
+			throw new IllegalStateException(e);
+		}
+	}
 
-    private static boolean isInList(List<String> onlys, String name) {
-        for (String only : onlys)
-            if (name.indexOf(only) >= 0)
-                return true;
-        return false;
-    }
+	protected static List<String> getFileList(String ignorePath)
+			throws FileNotFoundException, IOException {
+		BufferedReader reader = new BufferedReader(new FileReader(ignorePath));
+		String s = null;
+		List<String> ignores = new ArrayList<String>();
+		while ((s = reader.readLine()) != null) {
+			ignores.add(s);
+		}
+		reader.close();
+		return ignores;
+	}
+
+	private static String jobExtToResExt(String fname) {
+		int dot = fname.lastIndexOf('.');
+		return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+	}
+
+	private static boolean isInList(List<String> onlys, String name) {
+		for (String only : onlys)
+			if (name.indexOf(only) >= 0)
+				return true;
+		return false;
+	}
 
 }
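
The suite's ignore/only filtering is substring-based (isInList uses indexOf): every line of
ignore.txt skips any job file whose name contains it, and a non-empty only.txt restricts the
run to matching names. For example, a hypothetical only.txt consisting of the single line

    TriangleCounting

would run just TriangleCounting.xml.
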
diff --git a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/util/TestUtils.java b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/util/TestUtils.java
index 1b22b47..d89ec46 100644
--- a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/util/TestUtils.java
+++ b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/util/TestUtils.java
@@ -62,8 +62,8 @@
             if (row1.equals(row2))
                 continue;
 
-            String[] fields1 = row1.split(",");
-            String[] fields2 = row2.split(",");
+            String[] fields1 = row1.split(" ");
+            String[] fields2 = row2.split(" ");
 
             for (int j = 0; j < fields1.length; j++) {
                 if (fields1[j].equals(fields2[j])) {
@@ -71,8 +71,6 @@
                 } else if (fields1[j].indexOf('.') < 0) {
                     return false;
                 } else {
-                    fields1[j] = fields1[j].split("=")[1];
-                    fields2[j] = fields2[j].split("=")[1];
                     Double double1 = Double.parseDouble(fields1[j]);
                     Double double2 = Double.parseDouble(fields2[j]);
                     float float1 = (float) double1.doubleValue();
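
The cast from double to float above appears intended to tolerate trailing-digit noise when
comparing floating-point result fields; a sketch of the effect, with hypothetical values:

    public class FloatToleranceSketch {
        public static void main(String[] args) {
            double d1 = 0.008290140026154316;
            double d2 = 0.008290140026154317; // differs only in the last decimal digit
            // equal once rounded to float precision, so the rows still match
            System.out.println((float) d1 == (float) d2); // true
        }
    }
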
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal.result b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal.result
index b8efedc..45376e2 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal.result
@@ -1,20 +1,20 @@
-0|Vertex(id=0,value=0, edges=(1,))
-1|Vertex(id=1,value=0, edges=(1,2,))
-2|Vertex(id=2,value=0, edges=(1,2,3,))
-3|Vertex(id=3,value=0, edges=(1,2,3,4,))
-4|Vertex(id=4,value=0, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=0, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=0, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=0, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=0, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=0, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=0, edges=(11,))
-11|Vertex(id=11,value=0, edges=(11,12,))
-12|Vertex(id=12,value=0, edges=(11,12,13,))
-13|Vertex(id=13,value=0, edges=(11,12,13,14,))
-14|Vertex(id=14,value=0, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=0, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=0, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=0, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=0, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=0, edges=(0,11,12,13,14,15,16,17,18,19,))
+0 0
+1 0
+2 0
+3 0
+4 0
+5 0
+6 0
+7 0
+8 0
+9 0
+10 0
+11 0
+12 0
+13 0
+14 0
+15 0
+16 0
+17 0
+18 0
+19 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex.result b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex.result
index ad448b2..dbc30fc 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex.result
@@ -1,23 +1,23 @@
-0|Vertex(id=0,value=0, edges=(1,50,))
-1|Vertex(id=1,value=0, edges=(1,2,))
-2|Vertex(id=2,value=0, edges=(1,2,3,))
-3|Vertex(id=3,value=0, edges=(1,2,3,4,))
-4|Vertex(id=4,value=0, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=0, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=0, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=0, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=0, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=0, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=0, edges=(11,99,))
-11|Vertex(id=11,value=0, edges=(11,12,101,))
-12|Vertex(id=12,value=0, edges=(11,12,13,))
-13|Vertex(id=13,value=0, edges=(11,12,13,14,))
-14|Vertex(id=14,value=0, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=0, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=0, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=0, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=0, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=0, edges=(0,11,12,13,14,15,16,17,18,19,))
-21|Vertex(id=21,value=21, edges=(22,23,24,))
-25|Vertex(id=25,value=25, edges=())
-27|Vertex(id=27,value=27, edges=())
+0 0
+1 0
+2 0
+3 0
+4 0
+5 0
+6 0
+7 0
+8 0
+9 0
+10 0
+11 0
+12 0
+13 0
+14 0
+15 0
+16 0
+17 0
+18 0
+19 0
+21 21
+25 25
+27 27
diff --git a/pregelix/pregelix-example/src/test/resources/expected/GraphMutation.result b/pregelix/pregelix-example/src/test/resources/expected/GraphMutation.result
new file mode 100644
index 0000000..a30166c
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/GraphMutation.result
@@ -0,0 +1,13 @@
+1 0.0
+5 0.0
+7 0.0
+11 0.0
+13 0.0
+17 0.0
+19 0.0
+100 0.0
+500 0.0
+700 0.0
+1100 0.0
+1300 0.0
+1700 0.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique.result b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique.result
new file mode 100644
index 0000000..d238037
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique.result
@@ -0,0 +1,7 @@
+1 1,2,3,4;
+2 2,3,4;
+3 
+4 
+5 
+6 
+7 
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRank.result b/pregelix/pregelix-example/src/test/resources/expected/PageRank.result
index f38e191..9c4d83a 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/PageRank.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRank.result
@@ -1,20 +1,20 @@
-0|Vertex(id=0,value=0.008290140026154316, edges=(1,))
-1|Vertex(id=1,value=0.1535152819247165, edges=(1,2,))
-2|Vertex(id=2,value=0.14646839195826475, edges=(1,2,3,))
-3|Vertex(id=3,value=0.08125113985998214, edges=(1,2,3,4,))
-4|Vertex(id=4,value=0.03976979906329426, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=0.0225041581462058, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=0.015736276824953852, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=0.012542224114863661, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=0.010628239626209894, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=0.009294348455354817, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=0.008290140026154316, edges=(11,))
-11|Vertex(id=11,value=0.15351528192471647, edges=(11,12,))
-12|Vertex(id=12,value=0.14646839195826472, edges=(11,12,13,))
-13|Vertex(id=13,value=0.08125113985998214, edges=(11,12,13,14,))
-14|Vertex(id=14,value=0.03976979906329425, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=0.0225041581462058, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=0.015736276824953852, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=0.012542224114863661, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=0.010628239626209894, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=0.009294348455354817, edges=(0,11,12,13,14,15,16,17,18,19,))
+0 0.008290140026154316
+1 0.1535152819247165
+2 0.14646839195826475
+3 0.08125113985998214
+4 0.03976979906329426
+5 0.0225041581462058
+6 0.015736276824953852
+7 0.012542224114863661
+8 0.010628239626209894
+9 0.009294348455354817
+10 0.008290140026154316
+11 0.15351528192471647
+12 0.14646839195826472
+13 0.08125113985998214
+14 0.03976979906329425
+15 0.0225041581462058
+16 0.015736276824953852
+17 0.012542224114863661
+18 0.010628239626209894
+19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankReal.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankReal.result
index ab05d38..6432eda 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/PageRankReal.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankReal.result
@@ -1,20 +1,20 @@
-0|Vertex(id=0,value=0.008290140026154316, edges=(1,))
-1|Vertex(id=1,value=0.1535152819247165, edges=(1,2,))
-2|Vertex(id=2,value=0.14646839195826475, edges=(1,2,3,))
-3|Vertex(id=3,value=0.08125113985998214, edges=(1,2,3,4,))
-4|Vertex(id=4,value=0.03976979906329426, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=0.0225041581462058, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=0.015736276824953852, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=0.012542224114863661, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=0.010628239626209894, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=0.009294348455354817, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=0.008290140026154316, edges=(11,))
-11|Vertex(id=11,value=0.15351528192471647, edges=(11,12,))
-12|Vertex(id=12,value=0.14646839195826472, edges=(11,12,13,))
-13|Vertex(id=13,value=0.08125113985998214, edges=(11,12,13,14,))
-14|Vertex(id=14,value=0.03976979906329426, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=0.0225041581462058, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=0.015736276824953852, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=0.012542224114863661, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=0.010628239626209894, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=0.009294348455354817, edges=(0,11,12,13,14,15,16,17,18,19,))
+0 0.008290140026154316
+1 0.1535152819247165
+2 0.14646839195826475
+3 0.08125113985998214
+4 0.03976979906329426
+5 0.0225041581462058
+6 0.015736276824953852
+7 0.012542224114863661
+8 0.010628239626209894
+9 0.009294348455354817
+10 0.008290140026154316
+11 0.15351528192471647
+12 0.14646839195826472
+13 0.08125113985998214
+14 0.03976979906329426
+15 0.0225041581462058
+16 0.015736276824953852
+17 0.012542224114863661
+18 0.010628239626209894
+19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex.result
index 1fc108a..2bd09e1 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex.result
@@ -1,23 +1,23 @@
-0|Vertex(id=0,value=0.0072088164890121405, edges=(1,50,))
-1|Vertex(id=1,value=0.12352056961948686, edges=(1,2,))
-2|Vertex(id=2,value=0.12045670441668178, edges=(1,2,3,))
-3|Vertex(id=3,value=0.06798545786459467, edges=(1,2,3,4,))
-4|Vertex(id=4,value=0.03387281259892814, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=0.01942600635480669, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=0.013661020012182747, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=0.0109034351563503, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=0.009241684574402657, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=0.008082028259564783, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=0.007208817414047232, edges=(11,99,))
-11|Vertex(id=11,value=0.07555839219845861, edges=(11,12,101,))
-12|Vertex(id=12,value=0.07249452699565352, edges=(11,12,13,))
-13|Vertex(id=13,value=0.05063539695954156, edges=(11,12,13,14,))
-14|Vertex(id=14,value=0.029644452692487822, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=0.018670183493927354, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=0.013558283213067561, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=0.010892790899883237, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=0.009240874593661061, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=0.008081987856433137, edges=(0,11,12,13,14,15,16,17,18,19,))
-21|Vertex(id=21,value=0.006521739130434782, edges=(22,23,24,))
-25|Vertex(id=25,value=0.006521739130434782, edges=())
-27|Vertex(id=27,value=0.006521739130434782, edges=())
+0 0.0072088164890121405
+1 0.12352056961948686
+2 0.12045670441668178
+3 0.06798545786459467
+4 0.03387281259892814
+5 0.01942600635480669
+6 0.013661020012182747
+7 0.0109034351563503
+8 0.009241684574402657
+9 0.008082028259564783
+10 0.007208817414047232
+11 0.07555839219845861
+12 0.07249452699565352
+13 0.05063539695954156
+14 0.029644452692487822
+15 0.018670183493927354
+16 0.013558283213067561
+17 0.010892790899883237
+18 0.009240874593661061
+19 0.008081987856433137
+21 0.006521739130434782
+25 0.006521739130434782
+27 0.006521739130434782
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic.result
new file mode 100644
index 0000000..6432eda
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic.result
@@ -0,0 +1,20 @@
+0 0.008290140026154316
+1 0.1535152819247165
+2 0.14646839195826475
+3 0.08125113985998214
+4 0.03976979906329426
+5 0.0225041581462058
+6 0.015736276824953852
+7 0.012542224114863661
+8 0.010628239626209894
+9 0.009294348455354817
+10 0.008290140026154316
+11 0.15351528192471647
+12 0.14646839195826472
+13 0.08125113985998214
+14 0.03976979906329426
+15 0.0225041581462058
+16 0.015736276824953852
+17 0.012542224114863661
+18 0.010628239626209894
+19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner.result
index ab05d38..9a747a6 100755
--- a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner.result
@@ -1,20 +1,20 @@
-0|Vertex(id=0,value=0.008290140026154316, edges=(1,))
-1|Vertex(id=1,value=0.1535152819247165, edges=(1,2,))
-2|Vertex(id=2,value=0.14646839195826475, edges=(1,2,3,))
-3|Vertex(id=3,value=0.08125113985998214, edges=(1,2,3,4,))
-4|Vertex(id=4,value=0.03976979906329426, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=0.0225041581462058, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=0.015736276824953852, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=0.012542224114863661, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=0.010628239626209894, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=0.009294348455354817, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=0.008290140026154316, edges=(11,))
-11|Vertex(id=11,value=0.15351528192471647, edges=(11,12,))
-12|Vertex(id=12,value=0.14646839195826472, edges=(11,12,13,))
-13|Vertex(id=13,value=0.08125113985998214, edges=(11,12,13,14,))
-14|Vertex(id=14,value=0.03976979906329426, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=0.0225041581462058, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=0.015736276824953852, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=0.012542224114863661, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=0.010628239626209894, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=0.009294348455354817, edges=(0,11,12,13,14,15,16,17,18,19,))
+0 0.008290140026154316
+1 0.15351528192471647
+2 0.14646839195826475
+3 0.08125113985998211
+4 0.03976979906329425
+5 0.0225041581462058
+6 0.01573627682495385
+7 0.012542224114863661
+8 0.010628239626209894
+9 0.009294348455354817
+10 0.008290140026154316
+11 0.1535152819247165
+12 0.14646839195826475
+13 0.08125113985998214
+14 0.03976979906329426
+15 0.0225041581462058
+16 0.015736276824953852
+17 0.012542224114863661
+18 0.010628239626209894
+19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex.result b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex.result
index 74113a8..a1dfc0f 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex.result
@@ -1,23 +1,23 @@
-0|Vertex(id=0,value=2, edges=(1,50,))
-1|Vertex(id=1,value=3, edges=(1,2,))
-2|Vertex(id=2,value=1, edges=(1,2,3,))
-3|Vertex(id=3,value=1, edges=(1,2,3,4,))
-4|Vertex(id=4,value=1, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=1, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=1, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=1, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=1, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=1, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=3, edges=(11,99,))
-11|Vertex(id=11,value=2, edges=(11,12,101,))
-12|Vertex(id=12,value=2, edges=(11,12,13,))
-13|Vertex(id=13,value=2, edges=(11,12,13,14,))
-14|Vertex(id=14,value=2, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=2, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=2, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=2, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=2, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=2, edges=(0,11,12,13,14,15,16,17,18,19,))
-21|Vertex(id=21,value=0, edges=(22,23,24,))
-25|Vertex(id=25,value=0, edges=())
-27|Vertex(id=27,value=0, edges=())
+0 2
+1 3
+2 1
+3 1
+4 1
+5 1
+6 1
+7 1
+8 1
+9 1
+10 3
+11 2
+12 2
+13 2
+14 2
+15 2
+16 2
+17 2
+18 2
+19 2
+21 0
+25 0
+27 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity.result b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity.result
index ea0edc2..1693fb2 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity.result
@@ -1,23 +1,23 @@
-0|Vertex(id=0,value=1, edges=(1,50,))
-1|Vertex(id=1,value=1, edges=(1,2,))
-2|Vertex(id=2,value=1, edges=(1,2,3,))
-3|Vertex(id=3,value=1, edges=(1,2,3,4,))
-4|Vertex(id=4,value=1, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=1, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=1, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=1, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=1, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=1, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=1, edges=(11,99,))
-11|Vertex(id=11,value=1, edges=(11,12,101,))
-12|Vertex(id=12,value=1, edges=(11,12,13,))
-13|Vertex(id=13,value=1, edges=(11,12,13,14,))
-14|Vertex(id=14,value=1, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=1, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=1, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=1, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=1, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=1, edges=(0,11,12,13,14,15,16,17,18,19,))
-21|Vertex(id=21,value=0, edges=(22,23,24,))
-25|Vertex(id=25,value=2, edges=())
-27|Vertex(id=27,value=0, edges=())
+0 1
+1 1
+2 1
+3 1
+4 1
+5 1
+6 1
+7 1
+8 1
+9 1
+10 1
+11 1
+12 1
+13 1
+14 1
+15 1
+16 1
+17 1
+18 1
+19 1
+21 0
+25 2
+27 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths.result b/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths.result
index 7bb0ca3..46d1c73 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths.result
@@ -1,20 +1,20 @@
-0|Vertex(id=0,value=0.0, edges=(1,))
-1|Vertex(id=1,value=0.0, edges=(1,2,))
-2|Vertex(id=2,value=100.0, edges=(1,2,3,))
-3|Vertex(id=3,value=300.0, edges=(1,2,3,4,))
-4|Vertex(id=4,value=600.0, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=1000.0, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=1500.0, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=2100.0, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=2800.0, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=3600.0, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=4500.0, edges=(11,))
-11|Vertex(id=11,value=5500.0, edges=(11,12,))
-12|Vertex(id=12,value=6600.0, edges=(11,12,13,))
-13|Vertex(id=13,value=7800.0, edges=(11,12,13,14,))
-14|Vertex(id=14,value=9100.0, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=10500.0, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=12000.0, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=13600.0, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=15300.0, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=17100.0, edges=(0,11,12,13,14,15,16,17,18,19,))
+0 0.0
+1 0.0
+2 100.0
+3 300.0
+4 600.0
+5 1000.0
+6 1500.0
+7 2100.0
+8 2800.0
+9 3600.0
+10 4500.0
+11 5500.0
+12 6600.0
+13 7800.0
+14 9100.0
+15 10500.0
+16 12000.0
+17 13600.0
+18 15300.0
+19 17100.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal.result b/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal.result
index f2c31a6..b42462f 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal.result
@@ -1,20 +1,20 @@
-0|Vertex(id=0,value=0.0, edges=(1,))
-1|Vertex(id=1,value=1.0, edges=(1,2,))
-2|Vertex(id=2,value=2.0, edges=(1,2,3,))
-3|Vertex(id=3,value=3.0, edges=(1,2,3,4,))
-4|Vertex(id=4,value=4.0, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=5.0, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=6.0, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=7.0, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=8.0, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=9.0, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=10.0, edges=(11,))
-11|Vertex(id=11,value=11.0, edges=(11,12,))
-12|Vertex(id=12,value=12.0, edges=(11,12,13,))
-13|Vertex(id=13,value=13.0, edges=(11,12,13,14,))
-14|Vertex(id=14,value=14.0, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=15.0, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=16.0, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=17.0, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=18.0, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=19.0, edges=(0,11,12,13,14,15,16,17,18,19,))
+0 0.0
+1 1.0
+2 2.0
+3 3.0
+4 4.0
+5 5.0
+6 6.0
+7 7.0
+8 8.0
+9 9.0
+10 10.0
+11 11.0
+12 12.0
+13 13.0
+14 14.0
+15 15.0
+16 16.0
+17 17.0
+18 18.0
+19 19.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting.result b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting.result
new file mode 100644
index 0000000..4818e13
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting.result
@@ -0,0 +1,7 @@
+1 3
+2 2
+3 0
+4 0
+5 1
+6 0
+7 0
diff --git a/pregelix/pregelix-example/src/test/resources/hadoop/conf/log4j.properties b/pregelix/pregelix-example/src/test/resources/hadoop/conf/log4j.properties
index d5e6004..3335964 100755
--- a/pregelix/pregelix-example/src/test/resources/hadoop/conf/log4j.properties
+++ b/pregelix/pregelix-example/src/test/resources/hadoop/conf/log4j.properties
@@ -76,7 +76,7 @@
 # FSNamesystem Audit logging
 # All audit events are logged at INFO level
 #
-log4j.logger.org.apache.hadoop.fs.FSNamesystem.audit=WARN
+log4j.logger.org.apache.hadoop=FATAL
 
 # Custom Logging levels
 
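The log4j change above broadens the test-time silencing: the old line only raised the FSNamesystem audit logger to WARN, while the replacement drops the entire org.apache.hadoop hierarchy to FATAL. For reference, the same effect in programmatic form using the standard log4j 1.x API; this is only a sketch of what the property line does, not code from this patch.

    // Programmatic equivalent of "log4j.logger.org.apache.hadoop=FATAL"
    // (log4j 1.x API); shown for illustration only.
    import org.apache.log4j.Level;
    import org.apache.log4j.Logger;

    public class QuietHadoopLogs {
        public static void main(String[] args) {
            Logger.getLogger("org.apache.hadoop").setLevel(Level.FATAL);
        }
    }
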
diff --git a/pregelix/pregelix-example/src/test/resources/hadoop/conf/mapred-site.xml b/pregelix/pregelix-example/src/test/resources/hadoop/conf/mapred-site.xml
index 1b9a4d6..71450f1 100644
--- a/pregelix/pregelix-example/src/test/resources/hadoop/conf/mapred-site.xml
+++ b/pregelix/pregelix-example/src/test/resources/hadoop/conf/mapred-site.xml
@@ -18,8 +18,8 @@
       <value>20</value>
    </property>
    <property>
-      <name>mapred.min.split.size</name>
-      <value>65536</value>
+      <name>mapred.max.split.size</name>
+      <value>128</value>
    </property>
 
 </configuration>
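Replacing mapred.min.split.size=65536 with mapred.max.split.size=128 caps each input split at 128 bytes, so even the tiny test inputs are broken into many splits and the tests exercise multiple parallel readers. The same setting in programmatic form, assuming the usual Hadoop Configuration job setup around it:

    // Sketch: the programmatic form of the mapred-site.xml change above.
    // Caps each input split at 128 bytes; job wiring around this is assumed.
    import org.apache.hadoop.conf.Configuration;

    public class SplitSizeConfig {
        public static Configuration smallSplits() {
            Configuration conf = new Configuration();
            conf.setLong("mapred.max.split.size", 128L);
            return conf;
        }
    }
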
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/GraphMutation.xml b/pregelix/pregelix-example/src/test/resources/jobs/GraphMutation.xml
new file mode 100644
index 0000000..9f51f6d
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/GraphMutation.xml
@@ -0,0 +1,141 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/webmap</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/result</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>Graph Mutation</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>pregelix.numVertices</name><value>20</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.GraphMutationVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.GraphMutationVertex$SimpleGraphMutationVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
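The job descriptors added in this diff (GraphMutation.xml above; MaximalClique.xml, PageRankRealDynamic.xml, and TriangleCounting.xml below) are serialized Hadoop Configuration dumps: the stock Hadoop defaults plus the pregelix.* keys. A sketch of how such a file can be produced follows; the keys are copied from GraphMutation.xml above, but this generator class is hypothetical, not the project's actual job-writing code. Note that new Configuration() pulls in the default Hadoop properties, which is why the files carry so many stock keys.

    // Hypothetical sketch: writes a Configuration as an XML job descriptor
    // like the files added in this diff. Keys copied from GraphMutation.xml.
    import java.io.FileOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;

    public class WriteJobXml {
        public static void main(String[] args) throws IOException {
            Configuration conf = new Configuration();  // loads Hadoop defaults
            conf.set("mapred.job.name", "Graph Mutation");
            conf.set("pregelix.vertexClass",
                    "edu.uci.ics.pregelix.example.GraphMutationVertex");
            conf.set("pregelix.vertexInputFormatClass",
                    "edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat");
            conf.set("pregelix.vertexOutputFormatClass",
                    "edu.uci.ics.pregelix.example.GraphMutationVertex$SimpleGraphMutationVertexOutputFormat");
            conf.setInt("pregelix.numVertices", 20);
            FileOutputStream out = new FileOutputStream("GraphMutation.xml");
            try {
                conf.writeXml(out);  // emits <configuration><property>... as above
            } finally {
                out.close();
            }
        }
    }
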
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique.xml b/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique.xml
new file mode 100644
index 0000000..616c647
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique.xml
@@ -0,0 +1,142 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/clique</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/resultclique</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>Maximal Clique</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueVertex$MaximalCliqueVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.maximalclique.TextMaximalCliqueInputFormat</value></property>
+<property><name>pregelix.aggregatorClass</name><value>edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueAggregator</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>pregelix.incStateLength</name><value>true</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/PageRank.xml b/pregelix/pregelix-example/src/test/resources/jobs/PageRank.xml
index e425b38..744e5b0 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/PageRank.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/PageRank.xml
@@ -123,7 +123,7 @@
 <property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
 <property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
 <property><name>hadoop.logfile.size</name><value>10000000</value></property>
-<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimplePageRankVertexInputFormat</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimulatedPageRankVertexInputFormat</value></property>
 <property><name>mapred.job.queue.name</name><value>default</value></property>
 <property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
 <property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
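This hunk (and the identical one in ShortestPaths.xml below) swaps the test's vertex input format by updating a single configuration key. In code form, with the inner-class name copied verbatim from the diff and the helper class itself hypothetical:

    // Sketch of the one-key change this hunk represents.
    import org.apache.hadoop.conf.Configuration;

    public class SwapVertexInputFormat {
        static void apply(Configuration conf) {
            conf.set("pregelix.vertexInputFormatClass",
                    "edu.uci.ics.pregelix.example.PageRankVertex$SimulatedPageRankVertexInputFormat");
        }
    }
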
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealDynamic.xml b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealDynamic.xml
new file mode 100644
index 0000000..c1a04ae
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealDynamic.xml
@@ -0,0 +1,143 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/webmap</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/result</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>PageRank</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>pregelix.numVertices</name><value>20</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>pregelix.combinerClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimpleSumCombiner</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimplePageRankVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>pregelix.incStateLength</name><value>true</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/ShortestPaths.xml b/pregelix/pregelix-example/src/test/resources/jobs/ShortestPaths.xml
index 3719247..9e791e2 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/ShortestPaths.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/ShortestPaths.xml
@@ -124,7 +124,7 @@
 <property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
 <property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
 <property><name>hadoop.logfile.size</name><value>10000000</value></property>
-<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimplePageRankVertexInputFormat</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimulatedPageRankVertexInputFormat</value></property>
 <property><name>mapred.job.queue.name</name><value>default</value></property>
 <property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
 <property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/TriangleCounting.xml b/pregelix/pregelix-example/src/test/resources/jobs/TriangleCounting.xml
new file mode 100644
index 0000000..0f44f4d
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/TriangleCounting.xml
@@ -0,0 +1,141 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/clique</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/resultclique</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>Triangle Counting</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingVertex$TriangleCountingVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.trianglecounting.TextTriangleCountingInputFormat</value></property>
+<property><name>pregelix.aggregatorClass</name><value>edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingAggregator</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
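Note: the file added above is a serialized Hadoop job configuration checked in as a test resource for the Triangle Counting example. The pregelix.* keys name the vertex, input format, output format, and aggregator classes; everything else is the stock Hadoop 0.20-era defaults. A minimal sketch of reloading such a resource and reading those keys (the resource path below is hypothetical, not taken from this patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    public class JobXmlProbe {
        public static void main(String[] args) {
            // 'false' skips the built-in defaults so only the checked-in file is read
            Configuration conf = new Configuration(false);
            conf.addResource(new Path("src/test/resources/jobs/TriangleCounting.xml")); // hypothetical path
            System.out.println(conf.get("mapred.job.name"));          // Triangle Counting
            System.out.println(conf.get("pregelix.vertexClass"));     // ...TriangleCountingVertex
            System.out.println(conf.get("pregelix.aggregatorClass")); // ...TriangleCountingAggregator
        }
    }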
diff --git a/pregelix/pregelix-example/src/test/resources/topology.xml b/pregelix/pregelix-example/src/test/resources/topology.xml
new file mode 100755
index 0000000..2a6c380
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/topology.xml
@@ -0,0 +1,7 @@
+<cluster-topology>
+    <network-switch name="Global">
+        <network-switch name="local">
+            <terminal name="127.1.0.1"/>
+        </network-switch>
+    </network-switch>
+</cluster-topology>
\ No newline at end of file
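Note: topology.xml models a two-level switch hierarchy (a "Global" switch containing a "local" switch) with a single terminal at 127.1.0.1, matching the single-node test setup. A minimal sketch, using the standard DOM API, of reading the terminals back out; how the runtime actually consumes this file is not shown in this diff:

    import javax.xml.parsers.DocumentBuilderFactory;
    import org.w3c.dom.Document;
    import org.w3c.dom.Element;
    import org.w3c.dom.NodeList;

    public class TopologyProbe {
        public static void main(String[] args) throws Exception {
            Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                    .parse("src/test/resources/topology.xml");
            NodeList terminals = doc.getElementsByTagName("terminal");
            for (int i = 0; i < terminals.getLength(); i++) {
                // prints "terminal: 127.1.0.1" for the resource above
                System.out.println("terminal: " + ((Element) terminals.item(i)).getAttribute("name"));
            }
        }
    }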
diff --git a/pregelix/pregelix-runtime/pom.xml b/pregelix/pregelix-runtime/pom.xml
index d12cb36..e75f98b 100644
--- a/pregelix/pregelix-runtime/pom.xml
+++ b/pregelix/pregelix-runtime/pom.xml
@@ -22,8 +22,9 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
+					<fork>true</fork>
 				</configuration>
 			</plugin>
 			<plugin>
@@ -42,6 +43,7 @@
 			</plugin>
 			<plugin>
 				<artifactId>maven-clean-plugin</artifactId>
+				<version>2.5</version>
 				<configuration>
 					<filesets>
 						<fileset>
@@ -116,13 +118,6 @@
 			<version>0.2.3-SNAPSHOT</version>
 		</dependency>
 		<dependency>
-			<groupId>org.apache.hadoop</groupId>
-			<artifactId>hadoop-core</artifactId>
-			<version>0.20.2</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
 			<artifactId>hyracks-storage-am-common</artifactId>
 			<version>0.2.3-SNAPSHOT</version>
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/bootstrap/NCBootstrapImpl.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/bootstrap/NCBootstrapImpl.java
index e7caeaf..76c725e 100644
--- a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/bootstrap/NCBootstrapImpl.java
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/bootstrap/NCBootstrapImpl.java
@@ -34,7 +34,7 @@
 
     @Override
     public void stop() throws Exception {
-        LOGGER.info("Stopping Giraph NC Bootstrap");
+        LOGGER.info("Stopping NC Bootstrap");
         RuntimeContext rCtx = (RuntimeContext) appCtx.getApplicationObject();
         rCtx.close();
     }
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/ComputeUpdateFunctionFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/ComputeUpdateFunctionFactory.java
index 105d3e2..f7958d9 100644
--- a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/ComputeUpdateFunctionFactory.java
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/ComputeUpdateFunctionFactory.java
@@ -21,6 +21,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Writable;
 
 import edu.uci.ics.hyracks.api.comm.IFrameWriter;
@@ -58,6 +59,8 @@
             private final ArrayTupleBuilder tbAlive = new ArrayTupleBuilder(2);
             private final ArrayTupleBuilder tbTerminate = new ArrayTupleBuilder(1);
             private final ArrayTupleBuilder tbGlobalAggregate = new ArrayTupleBuilder(1);
+            private final ArrayTupleBuilder tbInsert = new ArrayTupleBuilder(2);
+            private final ArrayTupleBuilder tbDelete = new ArrayTupleBuilder(1);
 
             // for writing out to message channel
             private IFrameWriter writerMsg;
@@ -82,6 +85,16 @@
             private ByteBuffer bufferGlobalAggregate;
             private GlobalAggregator aggregator;
 
+            // for writing out to insert vertex channel
+            private IFrameWriter writerInsert;
+            private FrameTupleAppender appenderInsert;
+            private ByteBuffer bufferInsert;
+
+            // for writing out to delete vertex channel
+            private IFrameWriter writerDelete;
+            private FrameTupleAppender appenderDelete;
+            private ByteBuffer bufferDelete;
+
             private Vertex vertex;
             private ResetableByteArrayOutputStream bbos = new ResetableByteArrayOutputStream();
             private DataOutput output = new DataOutputStream(bbos);
@@ -90,11 +103,15 @@
             private final List<IFrameWriter> writers = new ArrayList<IFrameWriter>();
             private final List<FrameTupleAppender> appenders = new ArrayList<FrameTupleAppender>();
             private final List<ArrayTupleBuilder> tbs = new ArrayList<ArrayTupleBuilder>();
+            private Configuration conf;
+            private boolean dynamicStateLength;
 
             @Override
             public void open(IHyracksTaskContext ctx, RecordDescriptor rd, IFrameWriter... writers)
                     throws HyracksDataException {
-                this.aggregator = BspUtils.createGlobalAggregator(confFactory.createConfiguration());
+                this.conf = confFactory.createConfiguration();
+                this.dynamicStateLength = BspUtils.getDynamicVertexValueSize(conf);
+                this.aggregator = BspUtils.createGlobalAggregator(conf);
                 this.aggregator.init();
 
                 this.writerMsg = writers[0];
@@ -114,8 +131,22 @@
                 this.appenderGlobalAggregate = new FrameTupleAppender(ctx.getFrameSize());
                 this.appenderGlobalAggregate.reset(bufferGlobalAggregate, true);
 
-                if (writers.length > 3) {
-                    this.writerAlive = writers[3];
+                this.writerInsert = writers[3];
+                this.bufferInsert = ctx.allocateFrame();
+                this.appenderInsert = new FrameTupleAppender(ctx.getFrameSize());
+                this.appenderInsert.reset(bufferInsert, true);
+                this.writers.add(writerInsert);
+                this.appenders.add(appenderInsert);
+
+                this.writerDelete = writers[4];
+                this.bufferDelete = ctx.allocateFrame();
+                this.appenderDelete = new FrameTupleAppender(ctx.getFrameSize());
+                this.appenderDelete.reset(bufferDelete, true);
+                this.writers.add(writerDelete);
+                this.appenders.add(appenderDelete);
+
+                if (writers.length > 5) {
+                    this.writerAlive = writers[5];
                     this.bufferAlive = ctx.allocateFrame();
                     this.appenderAlive = new FrameTupleAppender(ctx.getFrameSize());
                     this.appenderAlive.reset(bufferAlive, true);
@@ -125,6 +156,8 @@
                 }
 
                 tbs.add(tbMsg);
+                tbs.add(tbInsert);
+                tbs.add(tbDelete);
                 tbs.add(tbAlive);
             }
 
@@ -155,7 +188,7 @@
                 /**
                  * this partition should not terminate
                  */
-                if (terminate && (!vertex.isHalted() || vertex.hasMessage()))
+                if (terminate && (!vertex.isHalted() || vertex.hasMessage() || vertex.createdNewLiveVertex()))
                     terminate = false;
 
                 aggregator.step(vertex);
@@ -164,6 +197,9 @@
             @Override
             public void close() throws HyracksDataException {
                 FrameTupleUtils.flushTuplesFinal(appenderMsg, writerMsg);
+                FrameTupleUtils.flushTuplesFinal(appenderInsert, writerInsert);
+                FrameTupleUtils.flushTuplesFinal(appenderDelete, writerDelete);
+
                 if (pushAlive)
                     FrameTupleUtils.flushTuplesFinal(appenderAlive, writerAlive);
                 if (!terminate) {
@@ -177,7 +213,8 @@
             private void writeOutGlobalAggregate() throws HyracksDataException {
                 try {
                     /**
-                     * get partial aggregate result and flush to the final aggregator
+                     * get partial aggregate result and flush to the final
+                     * aggregator
                      */
                     Writable agg = aggregator.finishPartial();
                     agg.write(tbGlobalAggregate.getDataOutput());
@@ -203,15 +240,27 @@
             }
 
             @Override
-            public void update(ITupleReference tupleRef) throws HyracksDataException {
+            public void update(ITupleReference tupleRef, ArrayTupleBuilder cloneUpdateTb) throws HyracksDataException {
                 try {
                     if (vertex != null && vertex.hasUpdate()) {
-                        int fieldCount = tupleRef.getFieldCount();
-                        for (int i = 1; i < fieldCount; i++) {
-                            byte[] data = tupleRef.getFieldData(i);
-                            int offset = tupleRef.getFieldStart(i);
-                            bbos.setByteArray(data, offset);
-                            vertex.write(output);
+                        if (!dynamicStateLength) {
+                            // in-place update
+                            int fieldCount = tupleRef.getFieldCount();
+                            for (int i = 1; i < fieldCount; i++) {
+                                byte[] data = tupleRef.getFieldData(i);
+                                int offset = tupleRef.getFieldStart(i);
+                                bbos.setByteArray(data, offset);
+                                vertex.write(output);
+                            }
+                        } else {
+                            // write the vertex id
+                            DataOutput tbOutput = cloneUpdateTb.getDataOutput();
+                            vertex.getVertexId().write(tbOutput);
+                            cloneUpdateTb.addFieldEndOffset();
+
+                            // write the vertex value
+                            vertex.write(tbOutput);
+                            cloneUpdateTb.addFieldEndOffset();
                         }
                     }
                 } catch (IOException e) {
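Note: the key behavioral change in this file is the new update(ITupleReference, ArrayTupleBuilder) contract. When vertex values have a fixed size, the old path still overwrites the stored bytes in place; when BspUtils.getDynamicVertexValueSize(conf) reports variable-size values, the vertex id and the new value are re-serialized into cloneUpdateTb so the caller can replace the index entry instead. A self-contained analogy of the two paths, in plain java.nio rather than the Pregelix API:

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    public class UpdatePathDemo {
        public static void main(String[] args) {
            // Fixed-size path: a 4-byte counter at a known offset can be
            // patched in place inside the page, no copy needed.
            ByteBuffer page = ByteBuffer.allocate(16);
            page.putInt(8, 41);
            page.putInt(8, page.getInt(8) + 1);
            System.out.println(page.getInt(8)); // 42

            // Variable-size path: the new value may not fit the old slot, so
            // the whole <id, value> record is rebuilt (the role cloneUpdateTb
            // plays above) and handed back for an index-level replace.
            byte[] id = "v1".getBytes(StandardCharsets.UTF_8);
            byte[] newValue = "a-much-longer-vertex-state".getBytes(StandardCharsets.UTF_8);
            ByteBuffer rebuilt = ByteBuffer.allocate(id.length + newValue.length);
            rebuilt.put(id).put(newValue);
            System.out.println(rebuilt.position() + " bytes re-serialized");
        }
    }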
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/StartComputeUpdateFunctionFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/StartComputeUpdateFunctionFactory.java
index f72b059..0cf64a0 100644
--- a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/StartComputeUpdateFunctionFactory.java
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/StartComputeUpdateFunctionFactory.java
@@ -21,6 +21,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Writable;
 
 import edu.uci.ics.hyracks.api.comm.IFrameWriter;
@@ -58,6 +59,8 @@
             private final ArrayTupleBuilder tbAlive = new ArrayTupleBuilder(2);
             private final ArrayTupleBuilder tbTerminate = new ArrayTupleBuilder(1);
             private final ArrayTupleBuilder tbGlobalAggregate = new ArrayTupleBuilder(1);
+            private final ArrayTupleBuilder tbInsert = new ArrayTupleBuilder(2);
+            private final ArrayTupleBuilder tbDelete = new ArrayTupleBuilder(1);
 
             // for writing out to message channel
             private IFrameWriter writerMsg;
@@ -76,12 +79,22 @@
             private ByteBuffer bufferGlobalAggregate;
             private GlobalAggregator aggregator;
 
-            //for writing out the global aggregate
+            // for writing out the termination state
             private IFrameWriter writerTerminate;
             private FrameTupleAppender appenderTerminate;
             private ByteBuffer bufferTerminate;
             private boolean terminate = true;
 
+            // for writing out to insert vertex channel
+            private IFrameWriter writerInsert;
+            private FrameTupleAppender appenderInsert;
+            private ByteBuffer bufferInsert;
+
+            // for writing out to delete vertex channel
+            private IFrameWriter writerDelete;
+            private FrameTupleAppender appenderDelete;
+            private ByteBuffer bufferDelete;
+
             // dummy empty msgList
             private MsgList msgList = new MsgList();
             private ArrayIterator msgIterator = new ArrayIterator();
@@ -93,11 +106,15 @@
             private final List<IFrameWriter> writers = new ArrayList<IFrameWriter>();
             private final List<FrameTupleAppender> appenders = new ArrayList<FrameTupleAppender>();
             private final List<ArrayTupleBuilder> tbs = new ArrayList<ArrayTupleBuilder>();
+            private Configuration conf;
+            private boolean dynamicStateLength;
 
             @Override
             public void open(IHyracksTaskContext ctx, RecordDescriptor rd, IFrameWriter... writers)
                     throws HyracksDataException {
-                this.aggregator = BspUtils.createGlobalAggregator(confFactory.createConfiguration());
+                this.conf = confFactory.createConfiguration();
+                this.dynamicStateLength = BspUtils.getDynamicVertexValueSize(conf);
+                this.aggregator = BspUtils.createGlobalAggregator(conf);
                 this.aggregator.init();
 
                 this.writerMsg = writers[0];
@@ -117,8 +134,22 @@
                 this.appenderGlobalAggregate = new FrameTupleAppender(ctx.getFrameSize());
                 this.appenderGlobalAggregate.reset(bufferGlobalAggregate, true);
 
-                if (writers.length > 3) {
-                    this.writerAlive = writers[3];
+                this.writerInsert = writers[3];
+                this.bufferInsert = ctx.allocateFrame();
+                this.appenderInsert = new FrameTupleAppender(ctx.getFrameSize());
+                this.appenderInsert.reset(bufferInsert, true);
+                this.writers.add(writerInsert);
+                this.appenders.add(appenderInsert);
+
+                this.writerDelete = writers[4];
+                this.bufferDelete = ctx.allocateFrame();
+                this.appenderDelete = new FrameTupleAppender(ctx.getFrameSize());
+                this.appenderDelete.reset(bufferDelete, true);
+                this.writers.add(writerDelete);
+                this.appenders.add(appenderDelete);
+
+                if (writers.length > 5) {
+                    this.writerAlive = writers[5];
                     this.bufferAlive = ctx.allocateFrame();
                     this.appenderAlive = new FrameTupleAppender(ctx.getFrameSize());
                     this.appenderAlive.reset(bufferAlive, true);
@@ -129,6 +160,8 @@
                 msgList.reset(msgIterator);
 
                 tbs.add(tbMsg);
+                tbs.add(tbInsert);
+                tbs.add(tbDelete);
                 tbs.add(tbAlive);
             }
 
@@ -156,7 +189,7 @@
                 /**
                  * this partition should not terminate
                  */
-                if (terminate && (!vertex.isHalted() || vertex.hasMessage()))
+                if (terminate && (!vertex.isHalted() || vertex.hasMessage() || vertex.createdNewLiveVertex()))
                     terminate = false;
 
                 /**
@@ -168,20 +201,24 @@
             @Override
             public void close() throws HyracksDataException {
                 FrameTupleUtils.flushTuplesFinal(appenderMsg, writerMsg);
+                FrameTupleUtils.flushTuplesFinal(appenderInsert, writerInsert);
+                FrameTupleUtils.flushTuplesFinal(appenderDelete, writerDelete);
+
                 if (pushAlive)
                     FrameTupleUtils.flushTuplesFinal(appenderAlive, writerAlive);
                 if (!terminate) {
                     writeOutTerminationState();
                 }
-                
-                /**write out global aggregate value*/
+
+                /** write out global aggregate value */
                 writeOutGlobalAggregate();
             }
 
             private void writeOutGlobalAggregate() throws HyracksDataException {
                 try {
                     /**
-                     * get partial aggregate result and flush to the final aggregator
+                     * get partial aggregate result and flush to the final
+                     * aggregator
                      */
                     Writable agg = aggregator.finishPartial();
                     agg.write(tbGlobalAggregate.getDataOutput());
@@ -207,15 +244,27 @@
             }
 
             @Override
-            public void update(ITupleReference tupleRef) throws HyracksDataException {
+            public void update(ITupleReference tupleRef, ArrayTupleBuilder cloneUpdateTb) throws HyracksDataException {
                 try {
                     if (vertex != null && vertex.hasUpdate()) {
-                        int fieldCount = tupleRef.getFieldCount();
-                        for (int i = 1; i < fieldCount; i++) {
-                            byte[] data = tupleRef.getFieldData(i);
-                            int offset = tupleRef.getFieldStart(i);
-                            bbos.setByteArray(data, offset);
-                            vertex.write(output);
+                        if (!dynamicStateLength) {
+                            // in-place update
+                            int fieldCount = tupleRef.getFieldCount();
+                            for (int i = 1; i < fieldCount; i++) {
+                                byte[] data = tupleRef.getFieldData(i);
+                                int offset = tupleRef.getFieldStart(i);
+                                bbos.setByteArray(data, offset);
+                                vertex.write(output);
+                            }
+                        } else {
+                            // write the vertex id
+                            DataOutput tbOutput = cloneUpdateTb.getDataOutput();
+                            vertex.getVertexId().write(tbOutput);
+                            cloneUpdateTb.addFieldEndOffset();
+
+                            // write the vertex value
+                            vertex.write(tbOutput);
+                            cloneUpdateTb.addFieldEndOffset();
                         }
                     }
                 } catch (IOException e) {
@@ -224,5 +273,4 @@
             }
         };
     }
-
 }
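Note: both function factories now hard-wire the insert and delete channels at writer indices 3 and 4 and push the optional alive channel from index 3 to index 5, so any job graph wiring these operators must supply the writers in that order. Summarized as constants (the names are ours; indices 1 and 2 are not visible in these hunks and are omitted):

    final class UpdateFunctionChannels {
        static final int MSG = 0;    // writerMsg, always present
        static final int INSERT = 3; // writerInsert, new in this change
        static final int DELETE = 4; // writerDelete, new in this change
        static final int ALIVE = 5;  // writerAlive, only when writers.length > 5
        private UpdateFunctionChannels() {}
    }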
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/RuntimeHookFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/RuntimeHookFactory.java
index f7d0018..c025f85 100644
--- a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/RuntimeHookFactory.java
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/RuntimeHookFactory.java
@@ -15,12 +15,12 @@
 package edu.uci.ics.pregelix.runtime.touchpoint;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Mapper.Context;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.hdfs.ContextFactory;
 import edu.uci.ics.pregelix.api.graph.Vertex;
 import edu.uci.ics.pregelix.api.util.BspUtils;
 import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
@@ -40,14 +40,13 @@
     public IRuntimeHook createRuntimeHook() {
 
         return new IRuntimeHook() {
+            private ContextFactory ctxFactory = new ContextFactory();
 
-            @SuppressWarnings({ "rawtypes", "unchecked" })
             @Override
             public void configure(IHyracksTaskContext ctx) throws HyracksDataException {
                 Configuration conf = confFactory.createConfiguration();
                 try {
-                    Context mapperContext = new Mapper().new Context(conf, new TaskAttemptID(), null, null, null, null,
-                            null);
+                    TaskAttemptContext mapperContext = ctxFactory.createContext(conf, new TaskAttemptID());
                     Vertex.setContext(mapperContext);
                     BspUtils.setDefaultConfiguration(conf);
                 } catch (Exception e) {
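Note: the old code built a Mapper().new Context(...) directly, whose seven-argument constructor is specific to the Hadoop 0.20 "new" API and is gone in later releases; routing construction through edu.uci.ics.hyracks.hdfs.ContextFactory confines that version dependence to one place. A hypothetical sketch of what such a shim can look like; the real ContextFactory implementation may differ:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;

    public class ContextFactorySketch {
        public TaskAttemptContext createContext(Configuration conf, TaskAttemptID tid) throws Exception {
            Class<?> clazz;
            try {
                // Hadoop 0.23+/2.x: TaskAttemptContext became an interface
                clazz = Class.forName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
            } catch (ClassNotFoundException e) {
                // Hadoop 0.20.x: TaskAttemptContext is a concrete class
                clazz = Class.forName("org.apache.hadoop.mapreduce.TaskAttemptContext");
            }
            return (TaskAttemptContext) clazz
                    .getConstructor(Configuration.class, TaskAttemptID.class)
                    .newInstance(conf, tid);
        }
    }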