merge from master
diff --git a/.gitignore b/.gitignore
index 32bcd37..b516c03 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,3 +9,6 @@
 asterix_logs
 build
 bin
+asterix-app/rttest
+asterix-app/mdtest/
+asterix-app/opttest/
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java
index d8477f1..718c0f1 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java
@@ -20,7 +20,6 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
@@ -296,11 +295,6 @@
     }
 
     @Override
-    public ILogicalOperator visitDieOperator(DieOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
     public ILogicalOperator visitNestedTupleSourceOperator(NestedTupleSourceOperator op, ILogicalOperator arg)
             throws AlgebricksException {
         NestedTupleSourceOperator opCopy = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(arg));
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/InvertedIndexPOperator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/InvertedIndexPOperator.java
index 49ca5ba..48f9e36 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/InvertedIndexPOperator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/InvertedIndexPOperator.java
@@ -3,7 +3,8 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.api.AsterixAppContextInfo;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
 import edu.uci.ics.asterix.common.context.AsterixRuntimeComponentsProvider;
 import edu.uci.ics.asterix.common.dataflow.IAsterixApplicationContextInfo;
 import edu.uci.ics.asterix.metadata.MetadataException;
@@ -196,20 +197,22 @@
             IBinaryTokenizerFactory queryTokenizerFactory = InvertedIndexAccessMethod.getBinaryTokenizerFactory(
                     searchModifierType, searchKeyType, secondaryIndex);
             IIndexDataflowHelperFactory dataflowHelperFactory;
+
+            AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
             if (!isPartitioned) {
                 dataflowHelperFactory = new LSMInvertedIndexDataflowHelperFactory(
                         AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
-                        GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES);
+                        storageProperties.getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages());
             } else {
                 dataflowHelperFactory = new PartitionedLSMInvertedIndexDataflowHelperFactory(
                         AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
-                        GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES);
+                        storageProperties.getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages());
             }
             LSMInvertedIndexSearchOperatorDescriptor invIndexSearchOp = new LSMInvertedIndexSearchOperatorDescriptor(
                     jobSpec, queryField, appContext.getStorageManagerInterface(), secondarySplitsAndConstraint.first,
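
The dataflow helpers above now take their in-memory component sizing from the instance-level AsterixStorageProperties instead of the GlobalConfig compile-time defaults. A minimal sketch of reading those two values, using only the getters shown in this hunk; the budget computation is illustrative and not part of the patch:

    AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
    long pageSize = storageProperties.getMemoryComponentPageSize();   // bytes per in-memory page
    long numPages = storageProperties.getMemoryComponentNumPages();   // pages per LSM memory component
    long memoryComponentBudget = pageSize * numPages;                 // illustrative: total bytes per memory component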
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
index 3f434b7..d5169c7 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
@@ -31,10 +31,12 @@
 import edu.uci.ics.asterix.optimizer.rules.IntroduceDynamicTypeCastRule;
 import edu.uci.ics.asterix.optimizer.rules.IntroduceEnforcedListTypeRule;
 import edu.uci.ics.asterix.optimizer.rules.IntroduceInstantLockSearchCallbackRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceRapidFrameFlushProjectRule;
 import edu.uci.ics.asterix.optimizer.rules.IntroduceSecondaryIndexInsertDeleteRule;
 import edu.uci.ics.asterix.optimizer.rules.IntroduceStaticTypeCastRule;
 import edu.uci.ics.asterix.optimizer.rules.LoadRecordFieldsRule;
 import edu.uci.ics.asterix.optimizer.rules.NestGroupByRule;
+import edu.uci.ics.asterix.optimizer.rules.NestedSubplanToJoinRule;
 import edu.uci.ics.asterix.optimizer.rules.PullPositionalVariableFromUnnestRule;
 import edu.uci.ics.asterix.optimizer.rules.PushAggFuncIntoStandaloneAggregateRule;
 import edu.uci.ics.asterix.optimizer.rules.PushAggregateIntoGroupbyRule;
@@ -80,7 +82,6 @@
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.PullSelectOutOfEqJoin;
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushAssignBelowUnionAllRule;
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushAssignDownThroughProductRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushDieUpRule;
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushLimitDownRule;
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushNestedOrderByUnderPreSortedGroupByRule;
 import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushProjectDownRule;
@@ -134,7 +135,6 @@
         List<IAlgebraicRewriteRule> condPushDownAndJoinInference = new LinkedList<IAlgebraicRewriteRule>();
 
         condPushDownAndJoinInference.add(new PushSelectDownRule());
-        condPushDownAndJoinInference.add(new PushDieUpRule());
         condPushDownAndJoinInference.add(new RemoveRedundantListifyRule());
         condPushDownAndJoinInference.add(new SimpleUnnestToProductRule());
         condPushDownAndJoinInference.add(new ComplexUnnestToProductRule());
@@ -193,6 +193,7 @@
         consolidation.add(new CountVarToCountOneRule());
         consolidation.add(new RemoveUnusedAssignAndAggregateRule());
         consolidation.add(new RemoveRedundantGroupByDecorVars());
+        consolidation.add(new NestedSubplanToJoinRule());
         return consolidation;
     }
 
@@ -253,6 +254,7 @@
         physicalRewritesTopLevel.add(new PushLimitDownRule());
         physicalRewritesTopLevel.add(new IntroduceProjectsRule());
         physicalRewritesTopLevel.add(new SetAlgebricksPhysicalOperatorsRule());
+        physicalRewritesTopLevel.add(new IntroduceRapidFrameFlushProjectRule());
         physicalRewritesTopLevel.add(new SetExecutionModeRule());
         return physicalRewritesTopLevel;
     }
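
Both rules registered above (NestedSubplanToJoinRule and IntroduceRapidFrameFlushProjectRule) implement the IAlgebraicRewriteRule contract used throughout these collections. A minimal skeleton mirroring the two rules added in this patch; the class name is hypothetical:

    package edu.uci.ics.asterix.optimizer.rules;

    import org.apache.commons.lang3.mutable.Mutable;

    import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
    import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;

    // Hypothetical skeleton: both rules added in this patch follow this shape and do their work in rewritePost.
    public class ExampleRewriteRule implements IAlgebraicRewriteRule {

        @Override
        public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
                throws AlgebricksException {
            return false;
        }

        @Override
        public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
                throws AlgebricksException {
            // Inspect opRef, rewrite the plan if the rule applies, and return whether anything changed.
            return false;
        }
    }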
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
index 2dce5f6..1f242b8 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
@@ -24,6 +24,7 @@
 import edu.uci.ics.asterix.aql.util.FunctionUtils;
 import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
 import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -108,8 +109,7 @@
             inputRecordVar = usedVariables.get(0);
         }
         IVariableTypeEnvironment env = oldAssignOperator.computeInputTypeEnvironment(context);
-        ARecordType inputRecordType = (ARecordType) env.getVarType(inputRecordVar);
-
+        IAType inputRecordType = (IAType) env.getVarType(inputRecordVar);
         boolean needCast = !requiredRecordType.equals(inputRecordType);
         if (!needCast)
             return false;
@@ -120,11 +120,8 @@
         // assign
         AbstractFunctionCallExpression cast = new ScalarFunctionCallExpression(
                 FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CAST_RECORD));
-        ARecordType[] types = new ARecordType[2];
-        types[0] = requiredRecordType;
-        types[1] = inputRecordType;
         cast.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(inputRecordVar)));
-        cast.setOpaqueParameters(types);
+        TypeComputerUtilities.setRequiredAndInputTypes(cast, requiredRecordType, inputRecordType);
         LogicalVariable newAssignVar = context.newVar();
         AssignOperator newAssignOperator = new AssignOperator(newAssignVar, new MutableObject<ILogicalExpression>(cast));
         newAssignOperator.getInputs().add(new MutableObject<ILogicalOperator>(op3));
@@ -142,6 +139,10 @@
         newInserDeleteOperator.getInputs().add(new MutableObject<ILogicalOperator>(projectOperator));
         insertDeleteOperator.getInputs().clear();
         op1.getInputs().get(0).setValue(newInserDeleteOperator);
+
+        context.computeAndSetTypeEnvironmentForOperator(newAssignOperator);
+        context.computeAndSetTypeEnvironmentForOperator(projectOperator);
+        context.computeAndSetTypeEnvironmentForOperator(newInserDeleteOperator);
         return true;
     }
 
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectRule.java
new file mode 100644
index 0000000..d3e11ed2
--- /dev/null
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectRule.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.algebra.operators.CommitOperator;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.StreamProjectPOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * This rule searches for project operators in an insert/delete/update plan and
+ * passes a hint to every project between the first "insert" and the commit
+ * operator. The hint makes the project operator push frames to the next operator
+ * without waiting for them to fill, which reduces the time exclusive locks are
+ * held on the keys that have been inserted.
+ * 
+ * @author salsubaiee
+ */
+public class IntroduceRapidFrameFlushProjectRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    private boolean checkIfRuleIsApplicable(AbstractLogicalOperator op) {
+        if (op.getOperatorTag() != LogicalOperatorTag.EXTENSION_OPERATOR) {
+            return false;
+        }
+        ExtensionOperator extensionOp = (ExtensionOperator) op;
+        if (!(extensionOp.getDelegate() instanceof CommitOperator)) {
+            return false;
+        }
+
+        AbstractLogicalOperator descendantOp = op;
+        while (descendantOp != null) {
+            if (descendantOp.getOperatorTag() == LogicalOperatorTag.PROJECT) {
+                if (descendantOp.getPhysicalOperator() == null) {
+                    return false;
+                }
+            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE) {
+                break;
+            }
+            descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
+        }
+        return true;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+
+        if (!checkIfRuleIsApplicable(op)) {
+            return false;
+        }
+        AbstractLogicalOperator descendantOp = op;
+        ProjectOperator projectOp = null;
+
+        boolean planModified = false;
+        while (descendantOp != null) {
+            if (descendantOp.getOperatorTag() == LogicalOperatorTag.PROJECT) {
+                projectOp = (ProjectOperator) descendantOp;
+                StreamProjectPOperator physicalOp = (StreamProjectPOperator) projectOp.getPhysicalOperator();
+                physicalOp.setRapidFrameFlush(true);
+                planModified = true;
+            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE) {
+                break;
+            }
+            descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
+        }
+        return planModified;
+    }
+}
\ No newline at end of file
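
The sketch below illustrates the behavior the rule enables via StreamProjectPOperator.setRapidFrameFlush(true). It is illustrative only and does not use the Hyracks frame writer API: with the hint, a completed tuple is pushed downstream immediately instead of waiting for the output frame to fill.

    import java.util.ArrayList;
    import java.util.List;

    // Illustrative sketch only (not the Hyracks runtime API).
    class RapidFlushProjectSketch {
        private final List<Object> frame = new ArrayList<Object>();
        private final int frameCapacity;
        private final boolean rapidFrameFlush; // corresponds to setRapidFrameFlush(true) above

        RapidFlushProjectSketch(int frameCapacity, boolean rapidFrameFlush) {
            this.frameCapacity = frameCapacity;
            this.rapidFrameFlush = rapidFrameFlush;
        }

        void append(Object tuple) {
            frame.add(tuple);
            // Normally a frame is pushed only when full; with the hint it is pushed after every tuple,
            // so the commit operator downstream can release exclusive locks on inserted keys sooner.
            if (rapidFrameFlush || frame.size() >= frameCapacity) {
                pushFrameDownstream();
            }
        }

        private void pushFrameDownstream() {
            frame.clear(); // stand-in for handing the frame to the next operator
        }
    }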
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
index c99e4bc..6985753 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
@@ -64,10 +64,16 @@
         }
 
         FunctionIdentifier fid = null;
+        /** find the record variable */
+        InsertDeleteOperator insertOp = (InsertDeleteOperator) op1;
+        ILogicalExpression recordExpr = insertOp.getPayloadExpression().getValue();
+        List<LogicalVariable> recordVar = new ArrayList<LogicalVariable>();
+        /** assume the payload is always a single variable expression */
+        recordExpr.getUsedVariables(recordVar);
+
         /** op2 is the assign operator which extract primary keys from the record variable */
         AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
-        List<LogicalVariable> recordVar = new ArrayList<LogicalVariable>();
-        VariableUtilities.getUsedVariables(op2, recordVar);
+
         if (recordVar.size() == 0) {
             /**
              * For the case primary key-assignment expressions are constant expressions,
@@ -92,7 +98,6 @@
             AssignOperator assignOp2 = (AssignOperator) op2;
             recordVar.addAll(assignOp2.getVariables());
         }
-        InsertDeleteOperator insertOp = (InsertDeleteOperator) op1;
         AqlDataSource datasetSource = (AqlDataSource) insertOp.getDataSource();
         AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
         String dataverseName = datasetSource.getId().getDataverseName();
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceStaticTypeCastRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceStaticTypeCastRule.java
index 3aae2dd..f42782b 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceStaticTypeCastRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceStaticTypeCastRule.java
@@ -42,7 +42,7 @@
 /**
  * Statically cast a constant from its type to a specified required type, in a
  * recursive way. It enables: 1. bag-based fields in a record, 2. bidirectional
- * cast of a open field and a matched closed field, and 3. put in null fields
+ * cast of an open field and a matched closed field, and 3. put in null fields
  * when necessary. It should be fired before the constant folding rule.
  * This rule is not responsible for type casting between primitive types.
  * Here is an example: A record { "hobby": {{"music", "coding"}}, "id": "001",
@@ -90,9 +90,6 @@
         InsertDeleteOperator insertDeleteOp = (InsertDeleteOperator) op2;
         if (insertDeleteOp.getOperation() == InsertDeleteOperator.Kind.DELETE)
             return false;
-        AbstractLogicalOperator assignOp = (AbstractLogicalOperator) op2.getInputs().get(0).getValue();
-        if (assignOp.getOperatorTag() != LogicalOperatorTag.ASSIGN)
-            return false;
         /**
          * get required record type
          */
@@ -101,21 +98,21 @@
         IAType[] schemaTypes = (IAType[]) dataSource.getSchemaTypes();
         IAType requiredRecordType = schemaTypes[schemaTypes.length - 1];
 
-        AssignOperator topAssignOperator = (AssignOperator) assignOp;
         List<LogicalVariable> usedVariables = new ArrayList<LogicalVariable>();
-        VariableUtilities.getUsedVariables(topAssignOperator, usedVariables);
+        insertDeleteOperator.getPayloadExpression().getValue().getUsedVariables(usedVariables);
 
         // the used variable should contain the record that will be inserted
         // but it will not fail in many cases even if the used variable set is
         // empty
         if (usedVariables.size() == 0)
             return false;
+
         oldRecordVariable = usedVariables.get(0);
         LogicalVariable inputRecordVar = usedVariables.get(0);
-        IVariableTypeEnvironment env = topAssignOperator.computeOutputTypeEnvironment(context);
+        IVariableTypeEnvironment env = insertDeleteOperator.computeOutputTypeEnvironment(context);
         IAType inputRecordType = (IAType) env.getVarType(inputRecordVar);
 
-        AbstractLogicalOperator currentOperator = assignOp;
+        AbstractLogicalOperator currentOperator = (AbstractLogicalOperator) op2.getInputs().get(0).getValue();
         /**
          * find the assign operator for the "input record" to the insert_delete
          * operator
@@ -123,6 +120,7 @@
         do {
             context.addToDontApplySet(this, currentOperator);
             if (currentOperator.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+                AssignOperator assignOp = (AssignOperator) currentOperator;
                 producedVariables.clear();
                 VariableUtilities.getProducedVariables(currentOperator, producedVariables);
                 int position = producedVariables.indexOf(oldRecordVariable);
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/NestedSubplanToJoinRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/NestedSubplanToJoinRule.java
new file mode 100644
index 0000000..c5d415e
--- /dev/null
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/NestedSubplanToJoinRule.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Replaces Subplan operators with nested-loop joins whose join condition is TRUE, provided the Subplan
+ * does not contain free variables (i.e., has no correlation with the input stream).
+ * 
+ * @author yingyib
+ */
+public class NestedSubplanToJoinRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        if (context.checkIfInDontApplySet(this, opRef.getValue()))
+            return false;
+        context.addToDontApplySet(this, opRef.getValue());
+
+        ILogicalOperator op1 = opRef.getValue();
+        if (op1.getInputs().size() == 0) {
+            return false;
+        }
+
+        boolean rewritten = false;
+        for (int index = 0; index < op1.getInputs().size(); index++) {
+            AbstractLogicalOperator child = (AbstractLogicalOperator) op1.getInputs().get(index).getValue();
+            if (child.getOperatorTag() != LogicalOperatorTag.SUBPLAN) {
+                continue;
+            }
+
+            AbstractOperatorWithNestedPlans subplan = (AbstractOperatorWithNestedPlans) child;
+            Set<LogicalVariable> freeVars = new HashSet<LogicalVariable>();
+            OperatorPropertiesUtil.getFreeVariablesInSubplans(subplan, freeVars);
+            if (!freeVars.isEmpty()) {
+                /**
+                 * the subplan is correlated with the outer plan, other rules can deal with it
+                 */
+                continue;
+            }
+
+            /** get the input operator of the subplan operator */
+            ILogicalOperator subplanInput = subplan.getInputs().get(0).getValue();
+
+            /** get all nested top operators */
+            List<ILogicalPlan> nestedPlans = subplan.getNestedPlans();
+            List<Mutable<ILogicalOperator>> nestedRoots = new ArrayList<Mutable<ILogicalOperator>>();
+            for (ILogicalPlan nestedPlan : nestedPlans) {
+                nestedRoots.addAll(nestedPlan.getRoots());
+            }
+            if (nestedRoots.size() == 0) {
+                /** there are no nested top operators */
+                return false;
+            }
+
+            /** expand the input and roots into a DAG of nested loop joins */
+            Mutable<ILogicalExpression> expr = new MutableObject<ILogicalExpression>(ConstantExpression.TRUE);
+            Mutable<ILogicalOperator> nestedRootRef = nestedRoots.get(0);
+            ILogicalOperator join = new LeftOuterJoinOperator(expr, new MutableObject<ILogicalOperator>(subplanInput),
+                    nestedRootRef);
+
+            /** rewrite the nested tuple source to be empty tuple source */
+            rewriteNestedTupleSource(nestedRootRef);
+
+            for (int i = 1; i < nestedRoots.size(); i++) {
+                join = new LeftOuterJoinOperator(expr, new MutableObject<ILogicalOperator>(join), nestedRoots.get(i));
+            }
+            op1.getInputs().get(index).setValue(join);
+            context.computeAndSetTypeEnvironmentForOperator(join);
+            rewritten = true;
+        }
+        return rewritten;
+    }
+
+    /**
+     * rewrite NestedTupleSource operators to EmptyTupleSource operators
+     * 
+     * @param nestedRootRef
+     */
+    private void rewriteNestedTupleSource(Mutable<ILogicalOperator> nestedRootRef) {
+        AbstractLogicalOperator nestedRoot = (AbstractLogicalOperator) nestedRootRef.getValue();
+        if (nestedRoot.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
+            nestedRootRef.setValue(new EmptyTupleSourceOperator());
+        }
+        List<Mutable<ILogicalOperator>> inputs = nestedRoot.getInputs();
+        for (Mutable<ILogicalOperator> input : inputs) {
+            rewriteNestedTupleSource(input);
+        }
+    }
+}
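
For an uncorrelated subplan, the rewrite above produces, per nested root, a left outer join with condition TRUE between the subplan's input and that root, after replacing the root's NestedTupleSource with an EmptyTupleSource. A minimal sketch of the fragment the rule builds, using the same classes the rule imports; the two EmptyTupleSourceOperator placeholders stand in for the real subplan input and nested root:

    import org.apache.commons.lang3.mutable.Mutable;
    import org.apache.commons.lang3.mutable.MutableObject;

    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;

    class NestedSubplanToJoinSketch {
        static ILogicalOperator joinFragment() {
            Mutable<ILogicalExpression> trueCond =
                    new MutableObject<ILogicalExpression>(ConstantExpression.TRUE);
            ILogicalOperator subplanInput = new EmptyTupleSourceOperator();              // placeholder input
            Mutable<ILogicalOperator> nestedRoot =
                    new MutableObject<ILogicalOperator>(new EmptyTupleSourceOperator()); // placeholder nested root
            // INPUT left-outer-join(TRUE) NESTED_ROOT: input tuples survive even when the nested
            // plan produces nothing, preserving the original subplan semantics.
            return new LeftOuterJoinOperator(trueCond,
                    new MutableObject<ILogicalOperator>(subplanInput), nestedRoot);
        }
    }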
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java
index c5a1cb0..bee8c40 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java
@@ -111,19 +111,7 @@
         
         // The assign now just "renames" the variable to make sure the upstream plan still works.
         srcAssignExprRef.setValue(new VariableReferenceExpression(aggVar));
-
-        // Create a new assign for a TRUE variable.
-        LogicalVariable trueVar = context.newVar();
-        AssignOperator trueAssignOp = new AssignOperator(trueVar, new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
         
-        ILogicalOperator aggInput = aggOp.getInputs().get(0).getValue();
-        aggOp.getInputs().get(0).setValue(trueAssignOp);
-        trueAssignOp.getInputs().add(new MutableObject<ILogicalOperator>(aggInput));
-        
-        // Set partitioning variable.
-        aggOp.setPartitioningVariable(trueVar);
-        
-        context.computeAndSetTypeEnvironmentForOperator(trueAssignOp);
         context.computeAndSetTypeEnvironmentForOperator(aggOp);
         context.computeAndSetTypeEnvironmentForOperator(assignOp);
         
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
index 9aa19c6..b60ba47 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
@@ -17,11 +17,14 @@
 
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 import org.apache.commons.lang3.mutable.Mutable;
 import org.apache.commons.lang3.mutable.MutableObject;
 
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
 import edu.uci.ics.asterix.om.base.ANull;
 import edu.uci.ics.asterix.om.base.AString;
 import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
@@ -39,10 +42,12 @@
 import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
 import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
 import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
 import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
 import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
 
 /**
  * This class is utility to do type cast.
@@ -124,6 +129,11 @@
      */
     public static boolean rewriteFuncExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
             IVariableTypeEnvironment env) throws AlgebricksException {
+        /**
+         * Sanity check: if the funcExpr contains list (ordered or unordered) or record variable expressions,
+         * we do not perform STATIC type casting because they are not statically cast-able;
+         * instead, the record will be dynamically cast at runtime.
+         */
         if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR) {
             if (reqType.equals(BuiltinType.ANY)) {
                 reqType = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
@@ -152,6 +162,10 @@
                     changed = changed || rewriteFuncExpr(argFuncExpr, exprType, exprType, env);
                 }
             }
+            if (!compatible(reqType, inputType)) {
+                throw new AlgebricksException("type mismatch, required: " + reqType.toString() + " actual: "
+                        + inputType.toString());
+            }
             return changed;
         }
     }
@@ -403,32 +417,124 @@
         for (int i = 0; i < openFields.length; i++) {
             if (openFields[i]) {
                 arguments.add(originalArguments.get(2 * i));
-                Mutable<ILogicalExpression> fExprRef = originalArguments.get(2 * i + 1);
-                ILogicalExpression argExpr = fExprRef.getValue();
-
+                Mutable<ILogicalExpression> expRef = originalArguments.get(2 * i + 1);
+                ILogicalExpression argExpr = expRef.getValue();
+                List<LogicalVariable> parameterVars = new ArrayList<LogicalVariable>();
+                argExpr.getUsedVariables(parameterVars);
                 // we need to handle open fields recursively by their default
                 // types
                 // for list, their item type is any
                 // for record, their
-                if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                boolean castInjected = false;
+                if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL
+                        || argExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
                     IAType reqFieldType = inputFieldTypes[i];
+                    // do not enforce the nested type when the argument expression uses no variables
                     if (inputFieldTypes[i].getTypeTag() == ATypeTag.RECORD) {
                         reqFieldType = DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
+                        if (!inputFieldTypes[i].equals(reqFieldType) && parameterVars.size() > 0) {
+                            //inject dynamic type casting
+                            injectCastFunction(FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CAST_RECORD),
+                                    reqFieldType, inputFieldTypes[i], expRef, argExpr);
+                            castInjected = true;
+                        }
                     }
                     if (inputFieldTypes[i].getTypeTag() == ATypeTag.ORDEREDLIST) {
                         reqFieldType = DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
+                        if (!inputFieldTypes[i].equals(reqFieldType) && parameterVars.size() > 0) {
+                            //inject dynamic type casting
+                            injectCastFunction(FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CAST_LIST),
+                                    reqFieldType, inputFieldTypes[i], expRef, argExpr);
+                            castInjected = true;
+                        }
                     }
                     if (inputFieldTypes[i].getTypeTag() == ATypeTag.UNORDEREDLIST) {
                         reqFieldType = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
+                        if (!inputFieldTypes[i].equals(reqFieldType) && parameterVars.size() > 0) {
+                            //inject dynamic type casting
+                            injectCastFunction(FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CAST_LIST),
+                                    reqFieldType, inputFieldTypes[i], expRef, argExpr);
+                            castInjected = true;
+                        }
                     }
-                    if (TypeComputerUtilities.getRequiredType((AbstractFunctionCallExpression) argExpr) == null) {
-                        ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) argExpr;
-                        rewriteFuncExpr(argFunc, reqFieldType, inputFieldTypes[i], env);
+                    if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                        //recursively rewrite function arguments
+                        if (TypeComputerUtilities.getRequiredType((AbstractFunctionCallExpression) argExpr) == null
+                                && reqFieldType != null) {
+                            if (castInjected) {
+                                //rewrite the arg expression inside the dynamic cast
+                                ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) argExpr;
+                                rewriteFuncExpr(argFunc, inputFieldTypes[i], inputFieldTypes[i], env);
+                            } else {
+                                //rewrite arg
+                                ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) argExpr;
+                                rewriteFuncExpr(argFunc, reqFieldType, inputFieldTypes[i], env);
+                            }
+                        }
                     }
                 }
-                arguments.add(fExprRef);
+                arguments.add(expRef);
             }
         }
     }
 
+    /**
+     * Inject a dynamic cast function wrapping an existing expression
+     * 
+     * @param funcInfo
+     *            the cast function
+     * @param reqType
+     *            the required type
+     * @param inputType
+     *            the original type
+     * @param exprRef
+     *            the expression reference
+     * @param argExpr
+     *            the original expression
+     */
+    private static void injectCastFunction(IFunctionInfo funcInfo, IAType reqType, IAType inputType,
+            Mutable<ILogicalExpression> exprRef, ILogicalExpression argExpr) {
+        ScalarFunctionCallExpression cast = new ScalarFunctionCallExpression(funcInfo);
+        cast.getArguments().add(new MutableObject<ILogicalExpression>(argExpr));
+        exprRef.setValue(cast);
+        TypeComputerUtilities.setRequiredAndInputTypes(cast, reqType, inputType);
+    }
+
+    /**
+     * Determine if two types are compatible
+     * 
+     * @param reqType
+     *            the required type
+     * @param inputType
+     *            the input type
+     * @return true if the two types are compatible; false otherwise
+     */
+    private static boolean compatible(IAType reqType, IAType inputType) {
+        if (reqType.getTypeTag() == ATypeTag.ANY || inputType.getTypeTag() == ATypeTag.ANY) {
+            return true;
+        }
+        if (reqType.getTypeTag() != ATypeTag.UNION && inputType.getTypeTag() != ATypeTag.UNION) {
+            if (reqType.equals(inputType)) {
+                return true;
+            } else {
+                return false;
+            }
+        }
+        Set<IAType> reqTypePossible = new HashSet<IAType>();
+        Set<IAType> inputTypePossible = new HashSet<IAType>();
+        if (reqType.getTypeTag() == ATypeTag.UNION) {
+            AUnionType unionType = (AUnionType) reqType;
+            reqTypePossible.addAll(unionType.getUnionList());
+        } else {
+            reqTypePossible.add(reqType);
+        }
+
+        if (inputType.getTypeTag() == ATypeTag.UNION) {
+            AUnionType unionType = (AUnionType) inputType;
+            inputTypePossible.addAll(unionType.getUnionList());
+        } else {
+            inputTypePossible.add(inputType);
+        }
+        return reqTypePossible.equals(inputTypePossible);
+    }
 }
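
A worked example of the compatible(reqType, inputType) check introduced above: if either side is ANY the types are compatible; for reqType = UNION{NULL, INT32} and inputType = UNION{INT32, NULL}, both sides expand to the same member set {NULL, INT32} and the check passes; for reqType = UNION{NULL, INT32} and inputType = INT32, the sets are {NULL, INT32} versus {INT32}, the check fails, and rewriteFuncExpr throws the type-mismatch AlgebricksException added earlier in this file.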
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java
index 0a5e4c5..b01e3e9 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java
@@ -22,7 +22,6 @@
 import edu.uci.ics.asterix.aql.expression.DataverseDecl;
 import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
 import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
 import edu.uci.ics.asterix.aql.expression.DistinctClause;
 import edu.uci.ics.asterix.aql.expression.DropStatement;
 import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -72,6 +71,8 @@
 import edu.uci.ics.asterix.aql.expression.WriteStatement;
 import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
 import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.api.AsterixAppContextInfo;
+import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.common.functions.FunctionConstants;
@@ -79,7 +80,6 @@
 import edu.uci.ics.asterix.formats.base.IDataFormat;
 import edu.uci.ics.asterix.metadata.MetadataException;
 import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.bootstrap.AsterixProperties;
 import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
 import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
 import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
@@ -125,7 +125,6 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
@@ -186,7 +185,6 @@
                 new EmptyTupleSourceOperator()));
 
         ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<Mutable<ILogicalOperator>>();
-        boolean isTransactionalWrite = false;
         ILogicalOperator topOp = p.first;
         ProjectOperator project = (ProjectOperator) topOp;
         LogicalVariable resVar = project.getVariables().get(0);
@@ -200,7 +198,7 @@
 
             List<Mutable<ILogicalExpression>> writeExprList = new ArrayList<Mutable<ILogicalExpression>>(1);
             writeExprList.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(resVar)));
-            ResultSetSinkId rssId = new ResultSetSinkId(metadataProvider.getResultSetId(), resultNodeName);
+            ResultSetSinkId rssId = new ResultSetSinkId(metadataProvider.getResultSetId());
             ResultSetDataSink sink = new ResultSetDataSink(rssId, null);
             topOp = new DistributeResultOperator(writeExprList, sink);
             topOp.getInputs().add(new MutableObject<ILogicalOperator>(project));
@@ -245,7 +243,6 @@
                     insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
                     leafOperator = new SinkOperator();
                     leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
-                    isTransactionalWrite = true;
                     break;
                 }
                 case DELETE: {
@@ -254,7 +251,6 @@
                     deleteOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
                     leafOperator = new SinkOperator();
                     leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(deleteOp));
-                    isTransactionalWrite = true;
                     break;
                 }
                 case BEGIN_FEED: {
@@ -263,7 +259,6 @@
                     insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
                     leafOperator = new SinkOperator();
                     leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
-                    isTransactionalWrite = false;
                     break;
                 }
             }
@@ -292,13 +287,11 @@
     }
 
     private FileSplit getDefaultOutputFileLocation() throws MetadataException {
-        if (AsterixProperties.INSTANCE.getOutputDir() == null) {
-            throw new MetadataException(
-                    "Output location for query result not specified at the time of deployment, must specify explicitly using 'write output to ..' statement");
-        }
-        String filePath = AsterixProperties.INSTANCE.getOutputDir() + System.getProperty("file.separator")
-                + OUTPUT_FILE_PREFIX + outputFileID.incrementAndGet();
-        return new FileSplit(AsterixProperties.INSTANCE.getMetadataNodeName(), new FileReference(new File(filePath)));
+        String outputDir = System.getProperty("java.io.tmpdir");
+        String filePath = outputDir + System.getProperty("file.separator") + OUTPUT_FILE_PREFIX
+                + outputFileID.incrementAndGet();
+        AsterixMetadataProperties metadataProperties = AsterixAppContextInfo.getInstance().getMetadataProperties();
+        return new FileSplit(metadataProperties.getMetadataNodeName(), new FileReference(new File(filePath)));
     }
 
     @Override
@@ -931,15 +924,6 @@
     }
 
     @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDieClause(DieClause lc, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p1 = aqlExprToAlgExpression(lc.getDieExpr(), tupSource);
-        DieOperator opDie = new DieOperator(p1.first);
-        opDie.getInputs().add(p1.second);
-        return new Pair<ILogicalOperator, LogicalVariable>(opDie, null);
-    }
-
-    @Override
     public Pair<ILogicalOperator, LogicalVariable> visitDistinctClause(DistinctClause dc,
             Mutable<ILogicalOperator> tupSource) throws AsterixException {
         List<Mutable<ILogicalExpression>> exprList = new ArrayList<Mutable<ILogicalExpression>>();
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java
index 3440ce8..9f28113 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java
@@ -22,7 +22,6 @@
 import edu.uci.ics.asterix.aql.expression.DataverseDecl;
 import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
 import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
 import edu.uci.ics.asterix.aql.expression.DistinctClause;
 import edu.uci.ics.asterix.aql.expression.DropStatement;
 import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -123,7 +122,6 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
 import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
@@ -192,7 +190,7 @@
     }
 
     private final JobId jobId;
-   private TranslationContext context;
+    private TranslationContext context;
     private String outputDatasetName;
     private ICompiledDmlStatement stmt;
     private AqlMetadataProvider metadataProvider;
@@ -201,8 +199,6 @@
     private MetaScopeILogicalOperator metaScopeOp = new MetaScopeILogicalOperator();
     private static LogicalVariable METADATA_DUMMY_VAR = new LogicalVariable(-1);
 
-    
-    
     public AqlPlusExpressionToPlanTranslator(JobId jobId, AqlMetadataProvider metadataProvider,
             Counter currentVarCounter, String outputDatasetName, ICompiledDmlStatement stmt) {
         this.jobId = jobId;
@@ -221,8 +217,8 @@
         return translate(expr, null);
     }
 
-    public ILogicalPlan translate(Query expr, AqlMetadataProvider metadata)
-            throws AlgebricksException, AsterixException {
+    public ILogicalPlan translate(Query expr, AqlMetadataProvider metadata) throws AlgebricksException,
+            AsterixException {
         IDataFormat format = metadata.getFormat();
         if (format == null) {
             throw new AlgebricksException("Data format has not been set.");
@@ -877,15 +873,6 @@
     }
 
     @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDieClause(DieClause lc, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p1 = aqlExprToAlgExpression(lc.getDieExpr(), tupSource);
-        DieOperator opDie = new DieOperator(p1.first);
-        opDie.getInputs().add(p1.second);
-        return new Pair<ILogicalOperator, LogicalVariable>(opDie, null);
-    }
-
-    @Override
     public Pair<ILogicalOperator, LogicalVariable> visitDistinctClause(DistinctClause dc,
             Mutable<ILogicalOperator> tupSource) throws AsterixException {
         List<Mutable<ILogicalExpression>> exprList = new ArrayList<Mutable<ILogicalExpression>>();
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
index 625fed1..6cf8d28 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
@@ -49,527 +49,490 @@
  */
 public class CompiledStatements {
 
-	public static interface ICompiledStatement {
+    public static interface ICompiledStatement {
 
-		public Kind getKind();
-	}
+        public Kind getKind();
+    }
 
-	public static class CompiledWriteFromQueryResultStatement implements
-			ICompiledDmlStatement {
+    public static class CompiledWriteFromQueryResultStatement implements ICompiledDmlStatement {
 
-		private String dataverseName;
-		private String datasetName;
-		private Query query;
-		private int varCounter;
+        private String dataverseName;
+        private String datasetName;
+        private Query query;
+        private int varCounter;
 
-		public CompiledWriteFromQueryResultStatement(String dataverseName,
-				String datasetName, Query query, int varCounter) {
-			this.dataverseName = dataverseName;
-			this.datasetName = datasetName;
-			this.query = query;
-			this.varCounter = varCounter;
-		}
+        public CompiledWriteFromQueryResultStatement(String dataverseName, String datasetName, Query query,
+                int varCounter) {
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+            this.query = query;
+            this.varCounter = varCounter;
+        }
 
-		public String getDataverseName() {
-			return dataverseName;
-		}
+        public String getDataverseName() {
+            return dataverseName;
+        }
 
-		public String getDatasetName() {
-			return datasetName;
-		}
+        public String getDatasetName() {
+            return datasetName;
+        }
 
-		public int getVarCounter() {
-			return varCounter;
-		}
+        public int getVarCounter() {
+            return varCounter;
+        }
 
-		public Query getQuery() {
-			return query;
-		}
+        public Query getQuery() {
+            return query;
+        }
 
-		@Override
-		public Kind getKind() {
-			return Kind.WRITE_FROM_QUERY_RESULT;
-		}
+        @Override
+        public Kind getKind() {
+            return Kind.WRITE_FROM_QUERY_RESULT;
+        }
 
-	}
+    }
 
-	public static class CompiledDatasetDropStatement implements
-			ICompiledStatement {
-		private final String dataverseName;
-		private final String datasetName;
+    public static class CompiledDatasetDropStatement implements ICompiledStatement {
+        private final String dataverseName;
+        private final String datasetName;
 
-		public CompiledDatasetDropStatement(String dataverseName,
-				String datasetName) {
-			this.dataverseName = dataverseName;
-			this.datasetName = datasetName;
-		}
+        public CompiledDatasetDropStatement(String dataverseName, String datasetName) {
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+        }
 
-		public String getDataverseName() {
-			return dataverseName;
-		}
+        public String getDataverseName() {
+            return dataverseName;
+        }
 
-		public String getDatasetName() {
-			return datasetName;
-		}
+        public String getDatasetName() {
+            return datasetName;
+        }
 
-		@Override
-		public Kind getKind() {
-			return Kind.DATASET_DROP;
-		}
-	}
+        @Override
+        public Kind getKind() {
+            return Kind.DATASET_DROP;
+        }
+    }
 
-	// added by yasser
-	public static class CompiledCreateDataverseStatement implements
-			ICompiledStatement {
-		private String dataverseName;
-		private String format;
+    // added by yasser
+    public static class CompiledCreateDataverseStatement implements ICompiledStatement {
+        private String dataverseName;
+        private String format;
 
-		public CompiledCreateDataverseStatement(String dataverseName,
-				String format) {
-			this.dataverseName = dataverseName;
-			this.format = format;
-		}
+        public CompiledCreateDataverseStatement(String dataverseName, String format) {
+            this.dataverseName = dataverseName;
+            this.format = format;
+        }
 
-		public String getDataverseName() {
-			return dataverseName;
-		}
+        public String getDataverseName() {
+            return dataverseName;
+        }
 
-		public String getFormat() {
-			return format;
-		}
+        public String getFormat() {
+            return format;
+        }
 
-		@Override
-		public Kind getKind() {
-			return Kind.CREATE_DATAVERSE;
-		}
-	}
+        @Override
+        public Kind getKind() {
+            return Kind.CREATE_DATAVERSE;
+        }
+    }
 
-	public static class CompiledNodeGroupDropStatement implements
-			ICompiledStatement {
-		private String nodeGroupName;
+    public static class CompiledNodeGroupDropStatement implements ICompiledStatement {
+        private String nodeGroupName;
 
-		public CompiledNodeGroupDropStatement(String nodeGroupName) {
-			this.nodeGroupName = nodeGroupName;
-		}
+        public CompiledNodeGroupDropStatement(String nodeGroupName) {
+            this.nodeGroupName = nodeGroupName;
+        }
 
-		public String getNodeGroupName() {
-			return nodeGroupName;
-		}
+        public String getNodeGroupName() {
+            return nodeGroupName;
+        }
 
-		@Override
-		public Kind getKind() {
-			return Kind.NODEGROUP_DROP;
-		}
-	}
+        @Override
+        public Kind getKind() {
+            return Kind.NODEGROUP_DROP;
+        }
+    }
 
-	public static class CompiledIndexDropStatement implements
-			ICompiledStatement {
-		private String dataverseName;
-		private String datasetName;
-		private String indexName;
+    public static class CompiledIndexDropStatement implements ICompiledStatement {
+        private String dataverseName;
+        private String datasetName;
+        private String indexName;
 
-		public CompiledIndexDropStatement(String dataverseName,
-				String datasetName, String indexName) {
-			this.dataverseName = dataverseName;
-			this.datasetName = datasetName;
-			this.indexName = indexName;
-		}
+        public CompiledIndexDropStatement(String dataverseName, String datasetName, String indexName) {
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+            this.indexName = indexName;
+        }
 
-		public String getDataverseName() {
-			return dataverseName;
-		}
+        public String getDataverseName() {
+            return dataverseName;
+        }
 
-		public String getDatasetName() {
-			return datasetName;
-		}
+        public String getDatasetName() {
+            return datasetName;
+        }
 
-		public String getIndexName() {
-			return indexName;
-		}
+        public String getIndexName() {
+            return indexName;
+        }
 
-		@Override
-		public Kind getKind() {
-			return Kind.INDEX_DROP;
-		}
-	}
+        @Override
+        public Kind getKind() {
+            return Kind.INDEX_DROP;
+        }
+    }
 
-	public static class CompiledDataverseDropStatement implements
-			ICompiledStatement {
-		private String dataverseName;
-		private boolean ifExists;
+    public static class CompiledDataverseDropStatement implements ICompiledStatement {
+        private String dataverseName;
+        private boolean ifExists;
 
-		public CompiledDataverseDropStatement(String dataverseName,
-				boolean ifExists) {
-			this.dataverseName = dataverseName;
-			this.ifExists = ifExists;
-		}
+        public CompiledDataverseDropStatement(String dataverseName, boolean ifExists) {
+            this.dataverseName = dataverseName;
+            this.ifExists = ifExists;
+        }
 
-		public String getDataverseName() {
-			return dataverseName;
-		}
+        public String getDataverseName() {
+            return dataverseName;
+        }
 
-		public boolean getIfExists() {
-			return ifExists;
-		}
+        public boolean getIfExists() {
+            return ifExists;
+        }
 
-		@Override
-		public Kind getKind() {
-			return Kind.DATAVERSE_DROP;
-		}
-	}
+        @Override
+        public Kind getKind() {
+            return Kind.DATAVERSE_DROP;
+        }
+    }
 
-	public static class CompiledTypeDropStatement implements ICompiledStatement {
-		private String typeName;
+    public static class CompiledTypeDropStatement implements ICompiledStatement {
+        private String typeName;
 
-		public CompiledTypeDropStatement(String nodeGroupName) {
-			this.typeName = nodeGroupName;
-		}
+        public CompiledTypeDropStatement(String typeName) {
+            this.typeName = typeName;
+        }
 
-		public String getTypeName() {
-			return typeName;
-		}
+        public String getTypeName() {
+            return typeName;
+        }
 
-		@Override
-		public Kind getKind() {
-			return Kind.TYPE_DROP;
-		}
-	}
+        @Override
+        public Kind getKind() {
+            return Kind.TYPE_DROP;
+        }
+    }
 
-	public static interface ICompiledDmlStatement extends ICompiledStatement {
+    public static interface ICompiledDmlStatement extends ICompiledStatement {
 
-		public String getDataverseName();
+        public String getDataverseName();
 
-		public String getDatasetName();
-	}
+        public String getDatasetName();
+    }
 
-	public static class CompiledCreateIndexStatement implements
-			ICompiledDmlStatement {
-		private final String indexName;
-		private final String dataverseName;
-		private final String datasetName;
-		private final List<String> keyFields;
-		private final IndexType indexType;
+    public static class CompiledCreateIndexStatement implements ICompiledDmlStatement {
+        private final String indexName;
+        private final String dataverseName;
+        private final String datasetName;
+        private final List<String> keyFields;
+        private final IndexType indexType;
 
-		// Specific to NGram index.
-		private final int gramLength;
+        // Specific to NGram index.
+        private final int gramLength;
 
-		public CompiledCreateIndexStatement(String indexName,
-				String dataverseName, String datasetName,
-				List<String> keyFields, int gramLength, IndexType indexType) {
-			this.indexName = indexName;
-			this.dataverseName = dataverseName;
-			this.datasetName = datasetName;
-			this.keyFields = keyFields;
-			this.gramLength = gramLength;
-			this.indexType = indexType;
-		}
+        public CompiledCreateIndexStatement(String indexName, String dataverseName, String datasetName,
+                List<String> keyFields, int gramLength, IndexType indexType) {
+            this.indexName = indexName;
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+            this.keyFields = keyFields;
+            this.gramLength = gramLength;
+            this.indexType = indexType;
+        }
 
-		public String getDatasetName() {
-			return datasetName;
-		}
+        public String getDatasetName() {
+            return datasetName;
+        }
 
-		public String getDataverseName() {
-			return dataverseName;
-		}
+        public String getDataverseName() {
+            return dataverseName;
+        }
 
-		public String getIndexName() {
-			return indexName;
-		}
+        public String getIndexName() {
+            return indexName;
+        }
 
-		public List<String> getKeyFields() {
-			return keyFields;
-		}
+        public List<String> getKeyFields() {
+            return keyFields;
+        }
 
-		public IndexType getIndexType() {
-			return indexType;
-		}
+        public IndexType getIndexType() {
+            return indexType;
+        }
 
-		public int getGramLength() {
-			return gramLength;
-		}
+        public int getGramLength() {
+            return gramLength;
+        }
 
-		@Override
-		public Kind getKind() {
-			return Kind.CREATE_INDEX;
-		}
-	}
+        @Override
+        public Kind getKind() {
+            return Kind.CREATE_INDEX;
+        }
+    }
 
-	public static class CompiledLoadFromFileStatement implements
-			ICompiledDmlStatement {
-		private String dataverseName;
-		private String datasetName;
-		private boolean alreadySorted;
-		private String adapter;
-		private Map<String, String> properties;
+    public static class CompiledLoadFromFileStatement implements ICompiledDmlStatement {
+        private String dataverseName;
+        private String datasetName;
+        private boolean alreadySorted;
+        private String adapter;
+        private Map<String, String> properties;
 
-		public CompiledLoadFromFileStatement(String dataverseName,
-				String datasetName, String adapter,
-				Map<String, String> properties, boolean alreadySorted) {
-			this.dataverseName = dataverseName;
-			this.datasetName = datasetName;
-			this.alreadySorted = alreadySorted;
-			this.adapter = adapter;
-			this.properties = properties;
-		}
+        public CompiledLoadFromFileStatement(String dataverseName, String datasetName, String adapter,
+                Map<String, String> properties, boolean alreadySorted) {
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+            this.alreadySorted = alreadySorted;
+            this.adapter = adapter;
+            this.properties = properties;
+        }
 
-		public String getDataverseName() {
-			return dataverseName;
-		}
-
-		public String getDatasetName() {
-			return datasetName;
-		}
+        public String getDataverseName() {
+            return dataverseName;
+        }
 
-		public boolean alreadySorted() {
-			return alreadySorted;
-		}
+        public String getDatasetName() {
+            return datasetName;
+        }
 
-		public String getAdapter() {
-			return adapter;
-		}
+        public boolean alreadySorted() {
+            return alreadySorted;
+        }
 
-		public Map<String, String> getProperties() {
-			return properties;
-		}
+        public String getAdapter() {
+            return adapter;
+        }
 
-		@Override
-		public Kind getKind() {
-			return Kind.LOAD_FROM_FILE;
-		}
-	}
+        public Map<String, String> getProperties() {
+            return properties;
+        }
 
-	public static class CompiledInsertStatement implements
-			ICompiledDmlStatement {
-		private final String dataverseName;
-		private final String datasetName;
-		private final Query query;
-		private final int varCounter;
+        @Override
+        public Kind getKind() {
+            return Kind.LOAD_FROM_FILE;
+        }
+    }
 
-		public CompiledInsertStatement(String dataverseName,
-				String datasetName, Query query, int varCounter) {
-			this.dataverseName = dataverseName;
-			this.datasetName = datasetName;
-			this.query = query;
-			this.varCounter = varCounter;
-		}
+    public static class CompiledInsertStatement implements ICompiledDmlStatement {
+        private final String dataverseName;
+        private final String datasetName;
+        private final Query query;
+        private final int varCounter;
 
-		public String getDataverseName() {
-			return dataverseName;
-		}
+        public CompiledInsertStatement(String dataverseName, String datasetName, Query query, int varCounter) {
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+            this.query = query;
+            this.varCounter = varCounter;
+        }
 
-		public String getDatasetName() {
-			return datasetName;
-		}
+        public String getDataverseName() {
+            return dataverseName;
+        }
 
-		public int getVarCounter() {
-			return varCounter;
-		}
+        public String getDatasetName() {
+            return datasetName;
+        }
 
-		public Query getQuery() {
-			return query;
-		}
+        public int getVarCounter() {
+            return varCounter;
+        }
 
-		@Override
-		public Kind getKind() {
-			return Kind.INSERT;
-		}
-	}
+        public Query getQuery() {
+            return query;
+        }
 
-	public static class CompiledBeginFeedStatement implements
-			ICompiledDmlStatement {
-		private String dataverseName;
-		private String datasetName;
-		private Query query;
-		private int varCounter;
+        @Override
+        public Kind getKind() {
+            return Kind.INSERT;
+        }
+    }
 
-		public CompiledBeginFeedStatement(String dataverseName,
-				String datasetName, Query query, int varCounter) {
-			this.dataverseName = dataverseName;
-			this.datasetName = datasetName;
-			this.query = query;
-			this.varCounter = varCounter;
-		}
+    public static class CompiledBeginFeedStatement implements ICompiledDmlStatement {
+        private String dataverseName;
+        private String datasetName;
+        private Query query;
+        private int varCounter;
 
-		@Override
-		public String getDataverseName() {
-			return dataverseName;
-		}
+        public CompiledBeginFeedStatement(String dataverseName, String datasetName, Query query, int varCounter) {
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+            this.query = query;
+            this.varCounter = varCounter;
+        }
 
-		@Override
-		public String getDatasetName() {
-			return datasetName;
-		}
+        @Override
+        public String getDataverseName() {
+            return dataverseName;
+        }
 
-		public int getVarCounter() {
-			return varCounter;
-		}
+        @Override
+        public String getDatasetName() {
+            return datasetName;
+        }
 
-		public Query getQuery() {
-			return query;
-		}
+        public int getVarCounter() {
+            return varCounter;
+        }
 
-		public void setQuery(Query query) {
-			this.query = query;
-		}
+        public Query getQuery() {
+            return query;
+        }
 
-		@Override
-		public Kind getKind() {
-			return Kind.BEGIN_FEED;
-		}
-	}
+        public void setQuery(Query query) {
+            this.query = query;
+        }
 
-	public static class CompiledControlFeedStatement implements
-			ICompiledDmlStatement {
-		private String dataverseName;
-		private String datasetName;
-		private OperationType operationType;
-		private Query query;
-		private int varCounter;
-		private Map<String, String> alteredParams;
+        @Override
+        public Kind getKind() {
+            return Kind.BEGIN_FEED;
+        }
+    }
 
-		public CompiledControlFeedStatement(OperationType operationType,
-				String dataverseName, String datasetName,
-				Map<String, String> alteredParams) {
-			this.dataverseName = dataverseName;
-			this.datasetName = datasetName;
-			this.operationType = operationType;
-			this.alteredParams = alteredParams;
-		}
+    public static class CompiledControlFeedStatement implements ICompiledDmlStatement {
+        private String dataverseName;
+        private String datasetName;
+        private OperationType operationType;
+        private Query query;
+        private int varCounter;
+        private Map<String, String> alteredParams;
 
-		@Override
-		public String getDataverseName() {
-			return dataverseName;
-		}
+        public CompiledControlFeedStatement(OperationType operationType, String dataverseName, String datasetName,
+                Map<String, String> alteredParams) {
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+            this.operationType = operationType;
+            this.alteredParams = alteredParams;
+        }
 
-		@Override
-		public String getDatasetName() {
-			return datasetName;
-		}
+        @Override
+        public String getDataverseName() {
+            return dataverseName;
+        }
 
-		public OperationType getOperationType() {
-			return operationType;
-		}
+        @Override
+        public String getDatasetName() {
+            return datasetName;
+        }
 
-		public int getVarCounter() {
-			return varCounter;
-		}
+        public OperationType getOperationType() {
+            return operationType;
+        }
 
-		public Query getQuery() {
-			return query;
-		}
+        public int getVarCounter() {
+            return varCounter;
+        }
 
-		@Override
-		public Kind getKind() {
-			return Kind.CONTROL_FEED;
-		}
+        public Query getQuery() {
+            return query;
+        }
 
-		public Map<String, String> getProperties() {
-			return alteredParams;
-		}
+        @Override
+        public Kind getKind() {
+            return Kind.CONTROL_FEED;
+        }
 
-		public void setProperties(Map<String, String> properties) {
-			this.alteredParams = properties;
-		}
-	}
+        public Map<String, String> getProperties() {
+            return alteredParams;
+        }
 
-	public static class CompiledDeleteStatement implements
-			ICompiledDmlStatement {
-		private VariableExpr var;
-		private String dataverseName;
-		private String datasetName;
-		private Expression condition;
-		private Clause dieClause;
-		private int varCounter;
-		private AqlMetadataProvider metadataProvider;
+        public void setProperties(Map<String, String> properties) {
+            this.alteredParams = properties;
+        }
+    }
 
-		public CompiledDeleteStatement(VariableExpr var, String dataverseName,
-				String datasetName, Expression condition, Clause dieClause,
-				int varCounter, AqlMetadataProvider metadataProvider) {
-			this.var = var;
-			this.dataverseName = dataverseName;
-			this.datasetName = datasetName;
-			this.condition = condition;
-			this.dieClause = dieClause;
-			this.varCounter = varCounter;
-			this.metadataProvider = metadataProvider;
-		}
+    public static class CompiledDeleteStatement implements ICompiledDmlStatement {
+        private VariableExpr var;
+        private String dataverseName;
+        private String datasetName;
+        private Expression condition;
+        private int varCounter;
+        private AqlMetadataProvider metadataProvider;
 
-		@Override
-		public String getDatasetName() {
-			return datasetName;
-		}
+        public CompiledDeleteStatement(VariableExpr var, String dataverseName, String datasetName,
+                Expression condition, int varCounter, AqlMetadataProvider metadataProvider) {
+            this.var = var;
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+            this.condition = condition;
+            this.varCounter = varCounter;
+            this.metadataProvider = metadataProvider;
+        }
 
-		@Override
-		public String getDataverseName() {
-			return dataverseName;
-		}
+        @Override
+        public String getDatasetName() {
+            return datasetName;
+        }
 
-		public int getVarCounter() {
-			return varCounter;
-		}
+        @Override
+        public String getDataverseName() {
+            return dataverseName;
+        }
 
-		public Expression getCondition() {
-			return condition;
-		}
+        public int getVarCounter() {
+            return varCounter;
+        }
 
-		public Clause getDieClause() {
-			return dieClause;
-		}
+        public Expression getCondition() {
+            return condition;
+        }
 
-		public Query getQuery() throws AlgebricksException {
+        public Query getQuery() throws AlgebricksException {
 
-			List<Expression> arguments = new ArrayList<Expression>();
-			String arg = dataverseName == null ? datasetName : dataverseName
-					+ "." + datasetName;
-			LiteralExpr argumentLiteral = new LiteralExpr(
-					new StringLiteral(arg));
-			arguments.add(argumentLiteral);
+            List<Expression> arguments = new ArrayList<Expression>();
+            String arg = dataverseName == null ? datasetName : dataverseName + "." + datasetName;
+            LiteralExpr argumentLiteral = new LiteralExpr(new StringLiteral(arg));
+            arguments.add(argumentLiteral);
 
-			CallExpr callExpression = new CallExpr(new FunctionSignature(
-					FunctionConstants.ASTERIX_NS, "dataset", 1), arguments);
-			List<Clause> clauseList = new ArrayList<Clause>();
-			Clause forClause = new ForClause(var, callExpression);
-			clauseList.add(forClause);
-			Clause whereClause = null;
-			if (condition != null) {
-				whereClause = new WhereClause(condition);
-				clauseList.add(whereClause);
-			}
-			if (dieClause != null) {
-				clauseList.add(dieClause);
-			}
+            CallExpr callExpression = new CallExpr(new FunctionSignature(FunctionConstants.ASTERIX_NS, "dataset", 1),
+                    arguments);
+            List<Clause> clauseList = new ArrayList<Clause>();
+            Clause forClause = new ForClause(var, callExpression);
+            clauseList.add(forClause);
+            Clause whereClause = null;
+            if (condition != null) {
+                whereClause = new WhereClause(condition);
+                clauseList.add(whereClause);
+            }
 
-			Dataset dataset = metadataProvider.findDataset(dataverseName,
-					datasetName);
-			if (dataset == null) {
-				throw new AlgebricksException("Unknown dataset " + datasetName);
-			}
-			String itemTypeName = dataset.getItemTypeName();
-			IAType itemType = metadataProvider.findType(
-					dataset.getDataverseName(), itemTypeName);
-			ARecordType recType = (ARecordType) itemType;
-			String[] fieldNames = recType.getFieldNames();
-			List<FieldBinding> fieldBindings = new ArrayList<FieldBinding>();
-			for (int i = 0; i < fieldNames.length; i++) {
-				FieldAccessor fa = new FieldAccessor(var, new Identifier(
-						fieldNames[i]));
-				FieldBinding fb = new FieldBinding(new LiteralExpr(
-						new StringLiteral(fieldNames[i])), fa);
-				fieldBindings.add(fb);
-			}
-			RecordConstructor rc = new RecordConstructor(fieldBindings);
+            Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+            if (dataset == null) {
+                throw new AlgebricksException("Unknown dataset " + datasetName);
+            }
+            String itemTypeName = dataset.getItemTypeName();
+            IAType itemType = metadataProvider.findType(dataset.getDataverseName(), itemTypeName);
+            ARecordType recType = (ARecordType) itemType;
+            String[] fieldNames = recType.getFieldNames();
+            List<FieldBinding> fieldBindings = new ArrayList<FieldBinding>();
+            for (int i = 0; i < fieldNames.length; i++) {
+                FieldAccessor fa = new FieldAccessor(var, new Identifier(fieldNames[i]));
+                FieldBinding fb = new FieldBinding(new LiteralExpr(new StringLiteral(fieldNames[i])), fa);
+                fieldBindings.add(fb);
+            }
+            RecordConstructor rc = new RecordConstructor(fieldBindings);
 
-			FLWOGRExpression flowgr = new FLWOGRExpression(clauseList, rc);
-			Query query = new Query();
-			query.setBody(flowgr);
-			return query;
-		}
+            FLWOGRExpression flowgr = new FLWOGRExpression(clauseList, rc);
+            Query query = new Query();
+            query.setBody(flowgr);
+            return query;
+        }
 
-		@Override
-		public Kind getKind() {
-			return Kind.DELETE;
-		}
+        @Override
+        public Kind getKind() {
+            return Kind.DELETE;
+        }
 
-	}
+    }
 
 }
diff --git a/asterix-app/data/fbu-dml-insert-shuffled.adm b/asterix-app/data/fbu-dml-insert-shuffled.adm
new file mode 100644
index 0000000..5e42fd2
--- /dev/null
+++ b/asterix-app/data/fbu-dml-insert-shuffled.adm
@@ -0,0 +1,29 @@
+{"id":11381089,"id-copy":11381089,"alias":"Earlene","name":"EarleneAmmons","user-since":datetime("2010-03-24T05:25:35"),"user-since-copy":datetime("2010-03-24T05:25:35"),"friend-ids":{{25392364,36996951,16110083,9799716,22893553,28551996,7706432,14225386,15633254,39395931,46707062,37226919,8532306,3765988,20939685,31136325,45222021,15355741,8760941,12045616,6890610,13560532,44914868,37744233}},"employment":[{"organization-name":"Roundhex","start-date":date("2000-06-10")}]}
+{"id":10495420,"id-copy":10495420,"alias":"Wendy","name":"WendyMcloskey","user-since":datetime("2011-04-26T23:38:24"),"user-since-copy":datetime("2011-04-26T23:38:24"),"friend-ids":{{16762653,46262691,12313140,20481262,347993,23105127,1680519,20880265,45611347,21907223,46615281,17188244,44019800,46943250,28647738,16792673,29406270,42714079}},"employment":[{"organization-name":"Qvohouse","start-date":date("2008-08-27")}]}
+{"id":10957867,"id-copy":10957867,"alias":"Zach","name":"ZachOppenheimer","user-since":datetime("2012-01-01T14:40:11"),"user-since-copy":datetime("2012-01-01T14:40:11"),"friend-ids":{{27759480,2112389,8560433,10052851,37714587,16717012,36648956,44803993,36030695,5359496,32302980,27143894,19287706}},"employment":[{"organization-name":"Tanzumbam","start-date":date("2003-05-14"),"end-date":date("2004-02-23")}]}
+{"id":9988417,"id-copy":9988417,"alias":"Coline","name":"ColineLane","user-since":datetime("2010-01-01T00:12:39"),"user-since-copy":datetime("2010-01-01T00:12:39"),"friend-ids":{{17656229,42804152}},"employment":[{"organization-name":"Fax-fax","start-date":date("2012-05-01")}]}
+{"id":10272571,"id-copy":10272571,"alias":"Jarrett","name":"JarrettGoldvogel","user-since":datetime("2010-04-28T23:24:22"),"user-since-copy":datetime("2010-04-28T23:24:22"),"friend-ids":{{47024505,36647273,32152567,28239957,11739703,47515825,17408763,41224279,41487670,43339913}},"employment":[{"organization-name":"Transhigh","start-date":date("2004-04-06"),"end-date":date("2010-02-14")}]}
+{"id":11307946,"id-copy":11307946,"alias":"Helga","name":"HelgaStough","user-since":datetime("2007-01-12T21:50:11"),"user-since-copy":datetime("2007-01-12T21:50:11"),"friend-ids":{{22768365}},"employment":[{"organization-name":"subtam","start-date":date("2007-01-04"),"end-date":date("2009-06-25")}]}
+{"id":11061631,"id-copy":11061631,"alias":"Maxene","name":"MaxeneKellogg","user-since":datetime("2005-11-13T01:09:31"),"user-since-copy":datetime("2005-11-13T01:09:31"),"friend-ids":{{31578394,39466620,35741359,14244925,3000582,39031643,5008430,18315325,30440631,37868108,12014032,32314102,42887702,1853960,28022174,2024670,38864358,42073112,16259942,34693959,25315399,37475597,33599283}},"employment":[{"organization-name":"Unijobam","start-date":date("2008-05-13")}]}
+{"id":10874791,"id-copy":10874791,"alias":"Haydee","name":"HaydeeGarratt","user-since":datetime("2007-04-14T00:19:00"),"user-since-copy":datetime("2007-04-14T00:19:00"),"friend-ids":{{12247794,10306863,33161811,43877113,37745696}},"employment":[{"organization-name":"Opeelectronics","start-date":date("2008-03-07"),"end-date":date("2011-12-27")}]}
+{"id":11570326,"id-copy":11570326,"alias":"Linden","name":"LindenFilby","user-since":datetime("2007-08-16T03:11:11"),"user-since-copy":datetime("2007-08-16T03:11:11"),"friend-ids":{{6549689,15243636,3147666}},"employment":[{"organization-name":"Solfix","start-date":date("2010-02-23"),"end-date":date("2010-04-22")}]}
+{"id":10498285,"id-copy":10498285,"alias":"Kiley","name":"KileyBridger","user-since":datetime("2006-05-14T21:55:34"),"user-since-copy":datetime("2006-05-14T21:55:34"),"friend-ids":{{38780484,46190003,905670,35609390,46621151,5099226,24328595,16340411,13326485,13872400,35896828,9196151,8525875,7461206,28379538,46461267,45270205,35718577,5310596,7080391}},"employment":[{"organization-name":"Newcom","start-date":date("2009-11-11"),"end-date":date("2009-06-23")}]}
+{"id":9629395,"id-copy":9629395,"alias":"Julius","name":"JuliusWire","user-since":datetime("2008-03-22T13:36:24"),"user-since-copy":datetime("2008-03-22T13:36:24"),"friend-ids":{{}},"employment":[{"organization-name":"Tranzap","start-date":date("2006-11-19")}]}
+{"id":11447332,"id-copy":11447332,"alias":"Sherisse","name":"SherisseMaugham","user-since":datetime("2012-02-09T14:21:08"),"user-since-copy":datetime("2012-02-09T14:21:08"),"friend-ids":{{}},"employment":[{"organization-name":"Tripplelane","start-date":date("2011-09-16")}]}
+{"id":10179538,"id-copy":10179538,"alias":"Orlando","name":"OrlandoBaxter","user-since":datetime("2006-02-06T08:33:07"),"user-since-copy":datetime("2006-02-06T08:33:07"),"friend-ids":{{6233497,33888281,44259464,19279042,22534429,13084190,38886041,41675566,3155617}},"employment":[{"organization-name":"Ontohothex","start-date":date("2009-07-06")}]}
+{"id":10001080,"id-copy":10001080,"alias":"Garrett","name":"GarrettBode","user-since":datetime("2005-10-25T18:07:35"),"user-since-copy":datetime("2005-10-25T18:07:35"),"friend-ids":{{35858744,16426061,11473961,4769664,29038930,33070686,46271872,42593454,36202882,46642640,22243678,20222041,29014540,7389258,7172909,12787979,146736,21081030,21615179,2936936,44934891}},"employment":[{"organization-name":"Tanzimcare","start-date":date("2007-06-24")}]}
+{"id":11675221,"id-copy":11675221,"alias":"Calanthe","name":"CalantheGearhart","user-since":datetime("2007-06-08T02:44:20"),"user-since-copy":datetime("2007-06-08T02:44:20"),"friend-ids":{{19185575}},"employment":[{"organization-name":"Vivaace","start-date":date("2010-05-21")}]}
+{"id":11140213,"id-copy":11140213,"alias":"Montgomery","name":"MontgomeryWhittier","user-since":datetime("2007-06-19T17:46:13"),"user-since-copy":datetime("2007-06-19T17:46:13"),"friend-ids":{{32831460,6030454,30437362,21866470,17388602,40815157,20000967,47555494,5818137,40634742,21692148,2365521,33290069,46471164,9192561,35768343,7552168,3577338,5346012,31129868}},"employment":[{"organization-name":"Y-geohex","start-date":date("2008-02-24")}]}
+{"id":11954992,"id-copy":11954992,"alias":"Caitlin","name":"CaitlinLangston","user-since":datetime("2007-01-02T01:50:34"),"user-since-copy":datetime("2007-01-02T01:50:34"),"friend-ids":{{23355687,22474136,28513847,32515387,44041844,33706721,10874992,36341753,34431157,16146113,15462591,18188151,29554174,44940738,25888018,42795884,14382632,12734889,11724519,15830341,25725320,37580394,24124411,47984339}},"employment":[{"organization-name":"Kanelectrics","start-date":date("2010-05-26"),"end-date":date("2010-03-28")}]}
+{"id":9510451,"id-copy":9510451,"alias":"Chuck","name":"ChuckFinck","user-since":datetime("2011-09-10T08:27:31"),"user-since-copy":datetime("2011-09-10T08:27:31"),"friend-ids":{{5559039,8997599,8311284,20478562,13734713,21511695,30393493}},"employment":[{"organization-name":"Inchdox","start-date":date("2001-10-12")}]}
+{"id":11068231,"id-copy":11068231,"alias":"Dinah","name":"DinahSwink","user-since":datetime("2012-05-02T04:24:33"),"user-since-copy":datetime("2012-05-02T04:24:33"),"friend-ids":{{31542440,17451543,32642661,27867264,32718667,43042567,7921827}},"employment":[{"organization-name":"highfax","start-date":date("2003-04-10"),"end-date":date("2003-10-03")}]}
+{"id":10361965,"id-copy":10361965,"alias":"Arlen","name":"ArlenFlick","user-since":datetime("2011-07-14T18:38:37"),"user-since-copy":datetime("2011-07-14T18:38:37"),"friend-ids":{{34249140,2887282,47622716,3897801,33692288,14374380,14183995,41311739,6378075,17721901,20807501,8908974,41080464,26497672}},"employment":[{"organization-name":"Medflex","start-date":date("2008-05-18"),"end-date":date("2011-09-18")}]}
+{"id":10423588,"id-copy":10423588,"alias":"Shirlene","name":"ShirleneRuch","user-since":datetime("2006-04-09T05:52:24"),"user-since-copy":datetime("2006-04-09T05:52:24"),"friend-ids":{{15418780,12724265,27282306,13592995,24753166,32824252,40619106,27563604,12337625,45387219,27749581,44912564,37470078,19663516}},"employment":[{"organization-name":"Newphase","start-date":date("2003-06-17")}]}
+{"id":11951098,"id-copy":11951098,"alias":"Tera","name":"TeraByers","user-since":datetime("2012-08-03T19:41:26"),"user-since-copy":datetime("2012-08-03T19:41:26"),"friend-ids":{{15537238,13699967,10587728,23542817,12703626,25024772,19223339,5547239,42576945,27351017,22726496,25268071,4361323,24631578,38669047,44781738,34646381}},"employment":[{"organization-name":"Sublamdox","start-date":date("2008-01-04"),"end-date":date("2011-01-14")}]}
+{"id":9594523,"id-copy":9594523,"alias":"Tam","name":"TamWillcox","user-since":datetime("2011-12-23T11:41:58"),"user-since-copy":datetime("2011-12-23T11:41:58"),"friend-ids":{{27383896,20745988,10063024,8241427,40299998,32408463,25171835,22380586,15344194,25951348,28733234,45421004,2273747,2229862,6241144,6704115,8659430,47431991,47929530,24393021}},"employment":[{"organization-name":"Keytech","start-date":date("2001-07-27")}]}
+{"id":9478720,"id-copy":9478720,"alias":"Angelia","name":"AngeliaKettlewell","user-since":datetime("2005-05-27T06:29:30"),"user-since-copy":datetime("2005-05-27T06:29:30"),"friend-ids":{{42556433,20033025,38112512,19420757,31822717,7116081,39544900,19203395,46787205,32303456,4509345,45558040,42616291,6929369,9272653,37459048,37113569,38942369,47741031,46761451,14163845}},"employment":[{"organization-name":"Alphadax","start-date":date("2012-03-28"),"end-date":date("2012-03-04")}]}
+{"id":9142198,"id-copy":9142198,"alias":"Sherry","name":"SherryFea","user-since":datetime("2011-03-28T23:09:22"),"user-since-copy":datetime("2011-03-28T23:09:22"),"friend-ids":{{6835080,34471872,30942941,34858577,5996593,47293442,43097072,44809621,33969893,26410931,6628186,29944391,35957320,20326929,40284077,11681583,43878314,40265961,16871274,28406169,1349311}},"employment":[{"organization-name":"Mathtech","start-date":date("2004-07-28")}]}
+{"id":9313492,"id-copy":9313492,"alias":"Tera","name":"TeraWolfe","user-since":datetime("2010-12-20T12:47:25"),"user-since-copy":datetime("2010-12-20T12:47:25"),"friend-ids":{{45424983,18345704,14849759,31638064,38670515,48015953,36114769}},"employment":[{"organization-name":"Redelectronics","start-date":date("2001-04-26"),"end-date":date("2004-12-06")}]}
+{"id":10307032,"id-copy":10307032,"alias":"Quentin","name":"QuentinSauter","user-since":datetime("2012-07-11T07:16:43"),"user-since-copy":datetime("2012-07-11T07:16:43"),"friend-ids":{{1926278,42211794,1508832,14973540,6721046,28872485,5047722,7805271,31508326,20891455,38735410,13190567,18209753,44468536,34640135,47290587,25576626}},"employment":[{"organization-name":"Zamcorporation","start-date":date("2012-02-13")}]}
+{"id":10733617,"id-copy":10733617,"alias":"Leonardo","name":"LeonardoKight","user-since":datetime("2008-10-20T17:30:29"),"user-since-copy":datetime("2008-10-20T17:30:29"),"friend-ids":{{39687903,7235506,34696496,25995345,18435380,47473591,15710408,44232442,39520147,36384026,25160887,245860,1195579,4587411,536916,47052672,33953823,13203710}},"employment":[{"organization-name":"tresline","start-date":date("2007-07-12"),"end-date":date("2010-03-16")}]}
+{"id":10394488,"id-copy":10394488,"alias":"Oswald","name":"OswaldRay","user-since":datetime("2006-02-12T17:39:23"),"user-since-copy":datetime("2006-02-12T17:39:23"),"friend-ids":{{14370372,14174983,7749259,39375970,1755409,9056913}},"employment":[{"organization-name":"Canline","start-date":date("2011-12-04"),"end-date":date("2011-06-08")}]}
diff --git a/asterix-app/pom.xml b/asterix-app/pom.xml
index d517837..7012a15 100644
--- a/asterix-app/pom.xml
+++ b/asterix-app/pom.xml
@@ -183,12 +183,14 @@
                         <type>test-jar</type>
                         <scope>test</scope>
                 </dependency>
+                <!-- possibly remove this
 		<dependency>
 			<groupId>com.kenai.nbpwr</groupId>
 			<artifactId>org-apache-commons-io</artifactId>
 			<version>1.3.1-201002241208</version>
 			<scope>test</scope>
 		</dependency>
+                 -->
 		<dependency>
 			<groupId>org.apache.hadoop</groupId>
 			<artifactId>hadoop-core</artifactId>
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
index 862c701..438bc74 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
@@ -26,8 +26,8 @@
 import edu.uci.ics.asterix.aql.expression.Query;
 import edu.uci.ics.asterix.aql.expression.visitor.AQLPrintVisitor;
 import edu.uci.ics.asterix.aql.rewrites.AqlRewriter;
-import edu.uci.ics.asterix.common.api.AsterixAppContextInfoImpl;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.api.AsterixAppContextInfo;
+import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
 import edu.uci.ics.asterix.common.config.OptimizationConfUtil;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.dataflow.data.common.AqlExpressionTypeComputer;
@@ -258,7 +258,8 @@
             }
         }
 
-        int frameSize = GlobalConfig.getFrameSize();
+        AsterixCompilerProperties compilerProperties = AsterixAppContextInfo.getInstance().getCompilerProperties();
+        int frameSize = compilerProperties.getFrameSize();
 
         HeuristicCompilerFactoryBuilder builder = new HeuristicCompilerFactoryBuilder(
                 AqlOptimizationContextFactory.INSTANCE);
@@ -342,7 +343,7 @@
 
         IJobletEventListenerFactory jobEventListenerFactory = new JobEventListenerFactory(asterixJobId,
                 isWriteTransaction);
-        JobSpecification spec = compiler.createJob(AsterixAppContextInfoImpl.getInstance(), jobEventListenerFactory);
+        JobSpecification spec = compiler.createJob(AsterixAppContextInfo.getInstance(), jobEventListenerFactory);
 
         if (pc.isPrintJob()) {
             switch (pdf) {
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
index d207acb..272050f 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
@@ -48,6 +48,7 @@
         ncConfig1.clusterNetIPAddress = "127.0.0.1";
         ncConfig1.dataIPAddress = "127.0.0.1";
         ncConfig1.datasetIPAddress = "127.0.0.1";
+        ncConfig1.resultHistorySize = 1000;
         ncConfig1.nodeId = NC1_ID;
         ncConfig1.appNCMainClass = NCApplicationEntryPoint.class.getName();
         nc1 = new NodeControllerService(ncConfig1);
@@ -59,6 +60,7 @@
         ncConfig2.clusterNetIPAddress = "127.0.0.1";
         ncConfig2.dataIPAddress = "127.0.0.1";
         ncConfig2.datasetIPAddress = "127.0.0.1";
+        ncConfig2.resultHistorySize = 1000;
         ncConfig2.nodeId = NC2_ID;
         ncConfig2.appNCMainClass = NCApplicationEntryPoint.class.getName();
         nc2 = new NodeControllerService(ncConfig2);
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java
index 29feb5e..2c15578 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java
@@ -17,6 +17,7 @@
 import java.io.IOException;
 import java.io.PrintWriter;
 import java.util.List;
+import java.util.logging.Level;
 
 import javax.servlet.ServletContext;
 import javax.servlet.http.HttpServlet;
@@ -32,6 +33,7 @@
 import edu.uci.ics.asterix.aql.parser.AQLParser;
 import edu.uci.ics.asterix.aql.parser.ParseException;
 import edu.uci.ics.asterix.aql.translator.AqlTranslator;
+import edu.uci.ics.asterix.common.config.GlobalConfig;
 import edu.uci.ics.asterix.metadata.MetadataManager;
 import edu.uci.ics.asterix.result.ResultReader;
 import edu.uci.ics.asterix.result.ResultUtils;
@@ -95,6 +97,7 @@
             aqlTranslator.compileAndExecute(hcc, hds, asyncResults);
 
         } catch (ParseException pe) {
+            GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, pe.getMessage(), pe);
             StringBuilder errorMessage = new StringBuilder();
             String message = pe.getMessage();
             message = message.replace("<", "&lt");
@@ -110,6 +113,7 @@
             JSONObject errorResp = ResultUtils.getErrorResponse(2, errorMessage.toString());
             out.write(errorResp.toString());
         } catch (Exception e) {
+            GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, e.getMessage(), e);
             StringBuilder errorMessage = new StringBuilder();
             errorMessage.append(e.getMessage());
             JSONObject errorResp = ResultUtils.getErrorResponse(99, errorMessage.toString());
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index 41dde5d..66a3980 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -64,6 +64,7 @@
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.common.functions.FunctionSignature;
 import edu.uci.ics.asterix.file.DatasetOperations;
+import edu.uci.ics.asterix.file.DataverseOperations;
 import edu.uci.ics.asterix.file.FeedOperations;
 import edu.uci.ics.asterix.file.IndexOperations;
 import edu.uci.ics.asterix.formats.base.IDataFormat;
@@ -123,6 +124,11 @@
  */
 public class AqlTranslator extends AbstractAqlTranslator {
 
+    private enum ProgressState {
+        NO_PROGRESS,
+        ADDED_PENDINGOP_RECORD_TO_METADATA
+    }
+
     private final List<Statement> aqlStatements;
     private final PrintWriter out;
     private final SessionConfig sessionConfig;
@@ -311,7 +317,7 @@
     }
 
     private Dataverse handleUseDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws MetadataException, RemoteException, ACIDException {
+            throws Exception {
 
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -327,15 +333,14 @@
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             return dv;
         } catch (Exception e) {
-            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+            abort(e, e, mdTxnCtx);
             throw new MetadataException(e);
         } finally {
             releaseReadLatch();
         }
     }
 
-    private void handleCreateDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws MetadataException, AlgebricksException, RemoteException, ACIDException {
+    private void handleCreateDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
 
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -345,14 +350,19 @@
             CreateDataverseStatement stmtCreateDataverse = (CreateDataverseStatement) stmt;
             String dvName = stmtCreateDataverse.getDataverseName().getValue();
             Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
-            if (dv != null && !stmtCreateDataverse.getIfNotExists()) {
-                throw new AlgebricksException("A dataverse with this name " + dvName + " already exists.");
+            if (dv != null) {
+                if (stmtCreateDataverse.getIfNotExists()) {
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    return;
+                } else {
+                    throw new AlgebricksException("A dataverse with this name " + dvName + " already exists.");
+                }
             }
             MetadataManager.INSTANCE.addDataverse(metadataProvider.getMetadataTxnContext(), new Dataverse(dvName,
                     stmtCreateDataverse.getFormat(), IMetadataEntity.PENDING_NO_OP));
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
-            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+            abort(e, e, mdTxnCtx);
             throw new AlgebricksException(e);
         } finally {
             releaseWriteLatch();
@@ -362,6 +372,7 @@
     private void handleCreateDatasetStatement(AqlMetadataProvider metadataProvider, Statement stmt,
             IHyracksClientConnection hcc) throws AsterixException, Exception {
 
+        ProgressState progress = ProgressState.NO_PROGRESS;
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -441,6 +452,11 @@
                 }
             }
 
+            //#. initialize DatasetIdFactory if it is not initialized.
+            if (!DatasetIdFactory.isInitialized()) {
+                DatasetIdFactory.initialize(MetadataManager.INSTANCE.getMostRecentDatasetId());
+            }
+
             //#. add a new dataset with PendingAddOp
             dataset = new Dataset(dataverseName, datasetName, itemTypeName, datasetDetails, dd.getHints(), dsType,
                     DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
@@ -455,6 +471,7 @@
                 //#. make metadataTxn commit before calling runJob.
                 MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                 bActiveTxn = false;
+                progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
 
                 //#. runJob
                 runJob(hcc, jobSpec, true);
@@ -467,18 +484,21 @@
 
             //#. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
             MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
-            MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), new Dataset(dataverseName,
-                    datasetName, itemTypeName, datasetDetails, dd.getHints(), dsType, dataset.getDatasetId(),
-                    IMetadataEntity.PENDING_NO_OP));
+            dataset.setPendingOp(IMetadataEntity.PENDING_NO_OP);
+            MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
             if (bActiveTxn) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                abort(e, e, mdTxnCtx);
             }
 
-            if (dataset != null) {
+            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
+
                 //#. execute compensation operations
                 //   remove the index in NC
+                //   [Notice]
+                //   Since we have already updated (and committed) the metadata, we should remove any effect
+                //   of the job because an exception occurred during runJob.
                 mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                 bActiveTxn = true;
                 metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -489,11 +509,11 @@
                     bActiveTxn = false;
 
                     runJob(hcc, jobSpec, true);
-                } catch (Exception e3) {
+                } catch (Exception e2) {
+                    e.addSuppressed(e2);
                     if (bActiveTxn) {
-                        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                        abort(e, e2, mdTxnCtx);
                     }
-                    //do no throw exception since still the metadata needs to be compensated. 
                 }
 
                 //   remove the record from the metadata.
@@ -504,8 +524,10 @@
                             datasetName);
                     MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                 } catch (Exception e2) {
-                    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
-                    throw new AlgebricksException(e2);
+                    e.addSuppressed(e2);
+                    abort(e, e2, mdTxnCtx);
+                    throw new IllegalStateException("System is inconsistent state: pending dataset(" + dataverseName
+                            + "." + datasetName + ") couldn't be removed from the metadata", e);
                 }
             }
 
@@ -518,6 +540,7 @@
     private void handleCreateIndexStatement(AqlMetadataProvider metadataProvider, Statement stmt,
             IHyracksClientConnection hcc) throws Exception {
 
+        ProgressState progress = ProgressState.NO_PROGRESS;
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -555,12 +578,11 @@
             aRecordType.validateKeyFields(stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getIndexType());
 
             if (idx != null) {
-                if (!stmtCreateIndex.getIfNotExists()) {
-                    throw new AlgebricksException("An index with this name " + indexName + " already exists.");
-                } else {
-                    stmtCreateIndex.setNeedToCreate(false);
+                if (stmtCreateIndex.getIfNotExists()) {
                     MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                     return;
+                } else {
+                    throw new AlgebricksException("An index with this name " + indexName + " already exists.");
                 }
             }
 
@@ -570,7 +592,7 @@
                     IMetadataEntity.PENDING_ADD_OP);
             MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
 
-            //#. create the index artifact in NC.
+            //#. prepare to create the index artifact in NC.
             CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName,
                     index.getDatasetName(), index.getKeyFieldNames(), index.getGramLength(), index.getIndexType());
             spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, metadataProvider);
@@ -580,7 +602,9 @@
             }
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             bActiveTxn = false;
+            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
 
+            //#. create the index artifact in NC.
             runJob(hcc, spec, true);
 
             mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
@@ -604,18 +628,16 @@
             //#. add another new index with PendingNoOp after deleting the index with PendingAddOp
             MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName,
                     indexName);
-            index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(),
-                    stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getGramLength(), false,
-                    IMetadataEntity.PENDING_NO_OP);
+            index.setPendingOp(IMetadataEntity.PENDING_NO_OP);
             MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
 
         } catch (Exception e) {
             if (bActiveTxn) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                abort(e, e, mdTxnCtx);
             }
 
-            if (spec != null) {
+            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
                 //#. execute compensation operations
                 //   remove the index in NC
                 mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
@@ -628,11 +650,11 @@
                     bActiveTxn = false;
 
                     runJob(hcc, jobSpec, true);
-                } catch (Exception e3) {
+                } catch (Exception e2) {
+                    e.addSuppressed(e2);
                     if (bActiveTxn) {
-                        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                        abort(e, e2, mdTxnCtx);
                     }
-                    //do no throw exception since still the metadata needs to be compensated. 
                 }
 
                 //   remove the record from the metadata.
@@ -643,8 +665,10 @@
                             datasetName, indexName);
                     MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                 } catch (Exception e2) {
-                    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
-                    throw new AlgebricksException(e2);
+                    e.addSuppressed(e2);
+                    abort(e, e2, mdTxnCtx);
+                    throw new IllegalStateException("System is inconsistent state: pending index(" + dataverseName
+                            + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
                 }
             }
             throw new AlgebricksException(e);
@@ -653,8 +677,7 @@
         }
     }
 
-    private void handleCreateTypeStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws AlgebricksException, RemoteException, ACIDException, MetadataException {
+    private void handleCreateTypeStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
 
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -690,7 +713,7 @@
             }
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
-            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+            abort(e, e, mdTxnCtx);
             throw new AlgebricksException(e);
         } finally {
             releaseWriteLatch();
@@ -700,56 +723,61 @@
     private void handleDataverseDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
             IHyracksClientConnection hcc) throws Exception {
 
+        ProgressState progress = ProgressState.NO_PROGRESS;
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
         acquireWriteLatch();
 
-        String dvName = null;
+        String dataverseName = null;
         List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
         try {
             DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
-            dvName = stmtDelete.getDataverseName().getValue();
+            dataverseName = stmtDelete.getDataverseName().getValue();
 
-            Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dvName);
+            Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
             if (dv == null) {
-                if (!stmtDelete.getIfExists()) {
-                    throw new AlgebricksException("There is no dataverse with this name " + dvName + ".");
+                if (stmtDelete.getIfExists()) {
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    return;
+                } else {
+                    throw new AlgebricksException("There is no dataverse with this name " + dataverseName + ".");
                 }
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                return;
             }
 
             //#. prepare jobs which will drop corresponding datasets with indexes. 
-            List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dvName);
+            List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
             for (int j = 0; j < datasets.size(); j++) {
                 String datasetName = datasets.get(j).getDatasetName();
                 DatasetType dsType = datasets.get(j).getDatasetType();
                 if (dsType == DatasetType.INTERNAL || dsType == DatasetType.FEED) {
 
-                    List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dvName, datasetName);
+                    List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
+                            datasetName);
                     for (int k = 0; k < indexes.size(); k++) {
                         if (indexes.get(k).isSecondaryIndex()) {
-                            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dvName, datasetName,
+                            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
                                     indexes.get(k).getIndexName());
                             jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider));
                         }
                     }
 
-                    CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dvName, datasetName);
+                    CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
                     jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
                 }
             }
+            jobsToExecute.add(DataverseOperations.createDropDataverseJobSpec(dv, metadataProvider));
 
             //#. mark PendingDropOp on the dataverse record by 
             //   first, deleting the dataverse record from the DATAVERSE_DATASET
             //   second, inserting the dataverse record with the PendingDropOp value into the DATAVERSE_DATASET
-            MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dvName);
-            MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dvName, dv.getDataFormat(),
+            MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
+            MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverseName, dv.getDataFormat(),
                     IMetadataEntity.PENDING_DROP_OP));
 
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             bActiveTxn = false;
+            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
 
             for (JobSpecification jobSpec : jobsToExecute) {
                 runJob(hcc, jobSpec, true);
@@ -760,32 +788,44 @@
             metadataProvider.setMetadataTxnContext(mdTxnCtx);
 
             //#. finally, delete the dataverse.
-            MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dvName);
-            if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName() == dvName) {
+            MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
+            if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName().equals(dataverseName)) {
                 activeDefaultDataverse = null;
             }
 
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
             if (bActiveTxn) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                abort(e, e, mdTxnCtx);
             }
 
-            //#. execute compensation operations
-            //   remove the all indexes in NC
-            for (JobSpecification jobSpec : jobsToExecute) {
-                runJob(hcc, jobSpec, true);
-            }
+            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
+                if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName().equals(dataverseName)) {
+                    activeDefaultDataverse = null;
+                }
 
-            //   remove the record from the metadata.
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            try {
-                MetadataManager.INSTANCE.dropDataverse(metadataProvider.getMetadataTxnContext(), dvName);
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-            } catch (Exception e2) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
-                throw new AlgebricksException(e2);
+                //#. execute compensation operations
+                //   remove the all indexes in NC
+                try {
+                    for (JobSpecification jobSpec : jobsToExecute) {
+                        runJob(hcc, jobSpec, true);
+                    }
+                } catch (Exception e2) {
+                    //do not throw an exception since the metadata still needs to be compensated.
+                    e.addSuppressed(e2);
+                }
+
+                //   remove the record from the metadata.
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                try {
+                    MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                } catch (Exception e2) {
+                    e.addSuppressed(e2);
+                    abort(e, e2, mdTxnCtx);
+                    throw new IllegalStateException("System is in an inconsistent state: pending dataverse("
+                            + dataverseName + ") couldn't be removed from the metadata", e);
+                }
             }
 
             throw new AlgebricksException(e);
@@ -797,6 +837,7 @@
     private void handleDatasetDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
             IHyracksClientConnection hcc) throws Exception {
 
+        ProgressState progress = ProgressState.NO_PROGRESS;
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -816,12 +857,13 @@
 
             Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
             if (ds == null) {
-                if (!stmtDelete.getIfExists()) {
+                if (stmtDelete.getIfExists()) {
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    return;
+                } else {
                     throw new AlgebricksException("There is no dataset with this name " + datasetName
                             + " in dataverse " + dataverseName + ".");
                 }
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                return;
             }
 
             if (ds.getDatasetType() == DatasetType.INTERNAL || ds.getDatasetType() == DatasetType.FEED) {
@@ -847,6 +889,7 @@
 
                 MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                 bActiveTxn = false;
+                progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
 
                 //#. run the jobs
                 for (JobSpecification jobSpec : jobsToExecute) {
@@ -864,25 +907,34 @@
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
             if (bActiveTxn) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                abort(e, e, mdTxnCtx);
             }
 
-            //#. execute compensation operations
-            //   remove the all indexes in NC
-            for (JobSpecification jobSpec : jobsToExecute) {
-                runJob(hcc, jobSpec, true);
-            }
+            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
+                //#. execute compensation operations
+                //   remove all the indexes in the NCs
+                try {
+                    for (JobSpecification jobSpec : jobsToExecute) {
+                        runJob(hcc, jobSpec, true);
+                    }
+                } catch (Exception e2) {
+                    //do not throw an exception here since the metadata still needs to be compensated.
+                    e.addSuppressed(e2);
+                }
 
-            //   remove the record from the metadata.
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            try {
-                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
-                        datasetName);
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-            } catch (Exception e2) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
-                throw new AlgebricksException(e2);
+                //   remove the record from the metadata.
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                try {
+                    MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
+                            datasetName);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                } catch (Exception e2) {
+                    e.addSuppressed(e2);
+                    abort(e, e2, mdTxnCtx);
+                    throw new IllegalStateException("System is in an inconsistent state: pending dataset("
+                            + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
+                }
             }
 
             throw new AlgebricksException(e);
@@ -894,6 +946,7 @@
     private void handleIndexDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
             IHyracksClientConnection hcc) throws Exception {
 
+        ProgressState progress = ProgressState.NO_PROGRESS;
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -922,38 +975,39 @@
                 indexName = stmtIndexDrop.getIndexName().getValue();
                 Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
                 if (index == null) {
-                    if (!stmtIndexDrop.getIfExists()) {
+                    if (stmtIndexDrop.getIfExists()) {
+                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                        return;
+                    } else {
                         throw new AlgebricksException("There is no index with this name " + indexName + ".");
                     }
-                } else {
-                    //#. prepare a job to drop the index in NC.
-                    CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
-                            indexName);
-                    jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider));
-
-                    //#. mark PendingDropOp on the existing index
-                    MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
-                    MetadataManager.INSTANCE.addIndex(
-                            mdTxnCtx,
-                            new Index(dataverseName, datasetName, indexName, index.getIndexType(), index
-                                    .getKeyFieldNames(), index.isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
-
-                    //#. commit the existing transaction before calling runJob. 
-                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                    bActiveTxn = false;
-
-                    for (JobSpecification jobSpec : jobsToExecute) {
-                        runJob(hcc, jobSpec, true);
-                    }
-
-                    //#. begin a new transaction
-                    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                    bActiveTxn = true;
-                    metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
-                    //#. finally, delete the existing index
-                    MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
                 }
+                //#. prepare a job to drop the index in NC.
+                CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
+                jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider));
+
+                //#. mark PendingDropOp on the existing index
+                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
+                MetadataManager.INSTANCE.addIndex(mdTxnCtx,
+                        new Index(dataverseName, datasetName, indexName, index.getIndexType(),
+                                index.getKeyFieldNames(), index.isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
+
+                //#. commit the existing transaction before calling runJob. 
+                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                bActiveTxn = false;
+                progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
+
+                for (JobSpecification jobSpec : jobsToExecute) {
+                    runJob(hcc, jobSpec, true);
+                }
+
+                //#. begin a new transaction
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                bActiveTxn = true;
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+
+                //#. finally, delete the existing index
+                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
             } else {
                 throw new AlgebricksException(datasetName
                         + " is an external dataset. Indexes are not maintained for external datasets.");
@@ -962,25 +1016,34 @@
 
         } catch (Exception e) {
             if (bActiveTxn) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                abort(e, e, mdTxnCtx);
             }
 
-            //#. execute compensation operations
-            //   remove the all indexes in NC
-            for (JobSpecification jobSpec : jobsToExecute) {
-                runJob(hcc, jobSpec, true);
-            }
+            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
+                //#. execute compensation operations
+                //   remove all the indexes in the NCs
+                try {
+                    for (JobSpecification jobSpec : jobsToExecute) {
+                        runJob(hcc, jobSpec, true);
+                    }
+                } catch (Exception e2) {
+                    //do not throw an exception here since the metadata still needs to be compensated.
+                    e.addSuppressed(e2);
+                }
 
-            //   remove the record from the metadata.
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            try {
-                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
-                        datasetName, indexName);
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-            } catch (Exception e2) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
-                throw new AlgebricksException(e2);
+                //   remove the record from the metadata.
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                try {
+                    MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
+                            datasetName, indexName);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                } catch (Exception e2) {
+                    e.addSuppressed(e2);
+                    abort(e, e2, mdTxnCtx);
+                    throw new IllegalStateException("System is in an inconsistent state: pending index(" + dataverseName
+                            + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
+                }
             }
 
             throw new AlgebricksException(e);
@@ -990,8 +1053,7 @@
         }
     }
 
-    private void handleTypeDropStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws AlgebricksException, MetadataException, RemoteException, ACIDException {
+    private void handleTypeDropStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
 
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -1014,15 +1076,14 @@
             }
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
-            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+            abort(e, e, mdTxnCtx);
             throw new AlgebricksException(e);
         } finally {
             releaseWriteLatch();
         }
     }
 
-    private void handleNodegroupDropStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws MetadataException, AlgebricksException, RemoteException, ACIDException {
+    private void handleNodegroupDropStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
 
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -1041,15 +1102,14 @@
 
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
-            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+            abort(e, e, mdTxnCtx);
             throw new AlgebricksException(e);
         } finally {
             releaseWriteLatch();
         }
     }
 
-    private void handleCreateFunctionStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws AlgebricksException, MetadataException, RemoteException, ACIDException {
+    private void handleCreateFunctionStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
         acquireWriteLatch();
@@ -1072,15 +1132,14 @@
 
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
-            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+            abort(e, e, mdTxnCtx);
             throw new AlgebricksException(e);
         } finally {
             releaseWriteLatch();
         }
     }
 
-    private void handleFunctionDropStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws MetadataException, RemoteException, ACIDException, AlgebricksException {
+    private void handleFunctionDropStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
         acquireWriteLatch();
@@ -1097,7 +1156,7 @@
             }
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
-            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+            abort(e, e, mdTxnCtx);
             throw new AlgebricksException(e);
         } finally {
             releaseWriteLatch();
@@ -1144,7 +1203,7 @@
             }
         } catch (Exception e) {
             if (bActiveTxn) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                abort(e, e, mdTxnCtx);
             }
 
             throw new AlgebricksException(e);
@@ -1176,7 +1235,7 @@
             }
         } catch (Exception e) {
             if (bActiveTxn) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                abort(e, e, mdTxnCtx);
             }
             throw new AlgebricksException(e);
         } finally {
@@ -1210,7 +1269,7 @@
 
         } catch (Exception e) {
             if (bActiveTxn) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                abort(e, e, mdTxnCtx);
             }
             throw new AlgebricksException(e);
         } finally {
@@ -1232,8 +1291,8 @@
             String dataverseName = stmtDelete.getDataverseName() == null ? activeDefaultDataverse == null ? null
                     : activeDefaultDataverse.getDataverseName() : stmtDelete.getDataverseName().getValue();
             CompiledDeleteStatement clfrqs = new CompiledDeleteStatement(stmtDelete.getVariableExpr(), dataverseName,
-                    stmtDelete.getDatasetName().getValue(), stmtDelete.getCondition(), stmtDelete.getDieClause(),
-                    stmtDelete.getVarCounter(), metadataProvider);
+                    stmtDelete.getDatasetName().getValue(), stmtDelete.getCondition(), stmtDelete.getVarCounter(),
+                    metadataProvider);
             JobSpecification compiled = rewriteCompileQuery(metadataProvider, clfrqs.getQuery(), clfrqs);
 
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
@@ -1245,7 +1304,7 @@
 
         } catch (Exception e) {
             if (bActiveTxn) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                abort(e, e, mdTxnCtx);
             }
             throw new AlgebricksException(e);
         } finally {
@@ -1310,7 +1369,7 @@
 
         } catch (Exception e) {
             if (bActiveTxn) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                abort(e, e, mdTxnCtx);
             }
             throw new AlgebricksException(e);
         } finally {
@@ -1340,7 +1399,7 @@
 
         } catch (Exception e) {
             if (bActiveTxn) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                abort(e, e, mdTxnCtx);
             }
             throw new AlgebricksException(e);
         } finally {
@@ -1402,7 +1461,7 @@
             return queryResult;
         } catch (Exception e) {
             if (bActiveTxn) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                abort(e, e, mdTxnCtx);
             }
             throw new AlgebricksException(e);
         } finally {
@@ -1410,8 +1469,7 @@
         }
     }
 
-    private void handleCreateNodeGroupStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws MetadataException, AlgebricksException, RemoteException, ACIDException {
+    private void handleCreateNodeGroupStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
 
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -1434,7 +1492,7 @@
             }
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
-            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+            abort(e, e, mdTxnCtx);
             throw new AlgebricksException(e);
         } finally {
             releaseWriteLatch();
@@ -1489,4 +1547,13 @@
     private void releaseReadLatch() {
         MetadataManager.INSTANCE.releaseReadLatch();
     }
-}
+
+    private void abort(Exception rootE, Exception parentE, MetadataTransactionContext mdTxnCtx) {
+        try {
+            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+        } catch (Exception e2) {
+            parentE.addSuppressed(e2);
+            throw new IllegalStateException(rootE);
+        }
+    }
+}
\ No newline at end of file
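
Note on the error-handling changes above: each drop handler now records a ProgressState before running Hyracks jobs, and compensation only runs once a PENDING_DROP_OP record has been committed; transaction aborts go through the new abort(rootE, parentE, mdTxnCtx) helper so that a failing abortTransaction() is attached as a suppressed exception instead of being lost. A minimal sketch of the intended commit/abort boundary (hypothetical fragment; mdTxnCtx and the enclosing handler are as above):

    try {
        // ... metadata mutations for this statement ...
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        // any failure of abortTransaction() is added to e as a suppressed exception inside abort()
        abort(e, e, mdTxnCtx);
        throw new AlgebricksException(e);
    }
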
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
index b9b9524..25a2551 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
@@ -22,6 +22,8 @@
 import java.util.logging.Logger;
 
 import edu.uci.ics.asterix.api.common.Job;
+import edu.uci.ics.asterix.common.api.AsterixAppContextInfo;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
 import edu.uci.ics.asterix.common.config.GlobalConfig;
 import edu.uci.ics.asterix.common.config.OptimizationConfUtil;
@@ -118,14 +120,15 @@
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider
                 .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(dataset.getDataverseName(), datasetName,
                         datasetName);
+        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
         IndexDropOperatorDescriptor primaryBtreeDrop = new IndexDropOperatorDescriptor(specPrimary,
                 AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER, AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER,
                 splitsAndConstraint.first, new LSMBTreeDataflowHelperFactory(
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
-                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER, GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE,
-                        GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES));
+                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
+                        storageProperties.getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages()));
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(specPrimary, primaryBtreeDrop,
                 splitsAndConstraint.second);
 
@@ -163,10 +166,11 @@
         }
         LOGGER.info("CREATING File Splits: " + sb.toString());
 
+        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
         //prepare a LocalResourceMetadata which will be stored in NC's local resource repository
         ILocalResourceMetadata localResourceMetadata = new LSMBTreeLocalResourceMetadata(typeTraits,
-                comparatorFactories, blooFilterKeyFields, true, GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE,
-                GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES);
+                comparatorFactories, blooFilterKeyFields, true, storageProperties.getMemoryComponentPageSize(),
+                storageProperties.getMemoryComponentNumPages());
         ILocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
                 localResourceMetadata, LocalResource.LSMBTreeResource);
 
@@ -176,9 +180,9 @@
                 new LSMBTreeDataflowHelperFactory(AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
-                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER, GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE,
-                        GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES), localResourceFactoryProvider,
-                NoOpOperationCallbackFactory.INSTANCE);
+                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER, storageProperties
+                                .getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages()),
+                localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, indexCreateOp,
                 splitsAndConstraint.second);
         spec.addRoot(indexCreateOp);
@@ -258,15 +262,17 @@
             numElementsHint = Long.parseLong(dataset.getHints().get("CARDINALITY"));
         }
 
+        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
         TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
                 AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER, AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER,
                 splitsAndConstraint.first, typeTraits, comparatorFactories, blooFilterKeyFields, fieldPermutation,
-                GlobalConfig.DEFAULT_BTREE_FILL_FACTOR, false, numElementsHint, new LSMBTreeDataflowHelperFactory(
+                GlobalConfig.DEFAULT_BTREE_FILL_FACTOR, false, numElementsHint,
+                new LSMBTreeDataflowHelperFactory(AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
-                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
-                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER, GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE,
-                        GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES), NoOpOperationCallbackFactory.INSTANCE);
+                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER, storageProperties
+                                .getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages()),
+                NoOpOperationCallbackFactory.INSTANCE);
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, btreeBulkLoad,
                 splitsAndConstraint.second);
 
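
The recurring change in DatasetOperations (and in the index-related files below) is that memory-component sizing no longer comes from the GlobalConfig compile-time defaults but from AsterixStorageProperties. A condensed sketch of the pattern, assuming AsterixAppContextInfo has been initialized during cluster-controller startup as shown further below:

    AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
    IIndexDataflowHelperFactory btreeHelperFactory = new LSMBTreeDataflowHelperFactory(
            AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
            AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
            AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
            AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
            // sizing now comes from cluster configuration rather than GlobalConfig constants
            storageProperties.getMemoryComponentPageSize(),
            storageProperties.getMemoryComponentNumPages());
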
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DataverseOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DataverseOperations.java
new file mode 100644
index 0000000..0654ff7
--- /dev/null
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DataverseOperations.java
@@ -0,0 +1,22 @@
+package edu.uci.ics.asterix.file;
+
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.file.FileRemoveOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+
+public class DataverseOperations {
+    public static JobSpecification createDropDataverseJobSpec(Dataverse dataverse, AqlMetadataProvider metadata) {
+        JobSpecification jobSpec = new JobSpecification();
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadata
+                .splitProviderAndPartitionConstraintsForDataverse(dataverse.getDataverseName());
+        FileRemoveOperatorDescriptor frod = new FileRemoveOperatorDescriptor(jobSpec, splitsAndConstraint.first);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, frod, splitsAndConstraint.second);
+        jobSpec.addRoot(frod);
+        return jobSpec;
+    }
+}
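
DataverseOperations is a new helper whose single job spec removes the dataverse's storage files on the node controllers. A sketch of how handleDataverseDropStatement above uses it (dv is the Dataverse entity looked up earlier in that handler; jobsToExecute, hcc and runJob are as in the handler):

    // queue the file-removal job; it is only run after the PENDING_DROP_OP record is committed
    jobsToExecute.add(DataverseOperations.createDropDataverseJobSpec(dv, metadataProvider));
    // ... commit the pending-op metadata record, then:
    for (JobSpecification jobSpec : jobsToExecute) {
        runJob(hcc, jobSpec, true);
    }
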
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
index dbbbe44..cec97ad 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
@@ -1,6 +1,7 @@
 package edu.uci.ics.asterix.file;
 
-import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.api.AsterixAppContextInfo;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
 import edu.uci.ics.asterix.common.config.OptimizationConfUtil;
 import edu.uci.ics.asterix.common.context.AsterixRuntimeComponentsProvider;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
@@ -48,14 +49,15 @@
 
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider
                 .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(dataverseName, datasetName, indexName);
+        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
         IndexDropOperatorDescriptor btreeDrop = new IndexDropOperatorDescriptor(spec,
                 AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER, AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER,
                 splitsAndConstraint.first, new LSMBTreeDataflowHelperFactory(
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
-                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER, GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE,
-                        GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES));
+                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
+                        storageProperties.getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages()));
         AlgebricksPartitionConstraintHelper
                 .setPartitionConstraintInJobSpec(spec, btreeDrop, splitsAndConstraint.second);
         spec.addRoot(btreeDrop);
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeCreator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeCreator.java
index 2a1349b..958f8d1 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeCreator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeCreator.java
@@ -1,6 +1,7 @@
 package edu.uci.ics.asterix.file;
 
-import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
+import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
 import edu.uci.ics.asterix.common.context.AsterixRuntimeComponentsProvider;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.transaction.management.resource.ILocalResourceMetadata;
@@ -26,31 +27,33 @@
 
 public class SecondaryBTreeCreator extends SecondaryIndexCreator {
 
-    protected SecondaryBTreeCreator(PhysicalOptimizationConfig physOptConf) {
-        super(physOptConf);
+    protected SecondaryBTreeCreator(PhysicalOptimizationConfig physOptConf,
+            IAsterixPropertiesProvider propertiesProvider) {
+        super(physOptConf, propertiesProvider);
     }
 
     @Override
     public JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException {
         JobSpecification spec = new JobSpecification();
 
+        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
         //prepare a LocalResourceMetadata which will be stored in NC's local resource repository
         ILocalResourceMetadata localResourceMetadata = new LSMBTreeLocalResourceMetadata(
                 secondaryRecDesc.getTypeTraits(), secondaryComparatorFactories, secondaryBloomFilterKeyFields, false,
-                GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES);
+                storageProperties.getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages());
         ILocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
                 localResourceMetadata, LocalResource.LSMBTreeResource);
 
         TreeIndexCreateOperatorDescriptor secondaryIndexCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
                 AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER, AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER,
                 secondaryFileSplitProvider, secondaryRecDesc.getTypeTraits(), secondaryComparatorFactories,
-                secondaryBloomFilterKeyFields, new LSMBTreeDataflowHelperFactory(
+                secondaryBloomFilterKeyFields,
+                new LSMBTreeDataflowHelperFactory(AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
-                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
-                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER, GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE,
-                        GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES), localResourceFactoryProvider,
-                NoOpOperationCallbackFactory.INSTANCE);
+                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER, storageProperties
+                                .getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages()),
+                localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryIndexCreateOp,
                 secondaryPartitionConstraint);
         spec.addRoot(secondaryIndexCreateOp);
@@ -80,13 +83,17 @@
         // Sort by secondary keys.
         ExternalSortOperatorDescriptor sortOp = createSortOp(spec, secondaryComparatorFactories, secondaryRecDesc);
 
+        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
         // Create secondary BTree bulk load op.
-        TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = createTreeIndexBulkLoadOp(spec, numSecondaryKeys,
+        TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = createTreeIndexBulkLoadOp(
+                spec,
+                numSecondaryKeys,
                 new LSMBTreeDataflowHelperFactory(AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
-                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER, GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE,
-                        GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES), BTree.DEFAULT_FILL_FACTOR);
+                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER, storageProperties
+                                .getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages()),
+                BTree.DEFAULT_FILL_FACTOR);
 
         // Connect the operators.
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java
index e7c2e3e..83c9393 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java
@@ -19,8 +19,10 @@
 import java.io.IOException;
 import java.util.List;
 
+import edu.uci.ics.asterix.common.api.AsterixAppContextInfo;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
 import edu.uci.ics.asterix.common.context.AsterixRuntimeComponentsProvider;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.formats.nontagged.AqlBinaryBooleanInspectorImpl;
@@ -103,29 +105,34 @@
     protected RecordDescriptor secondaryRecDesc;
     protected ICopyEvaluatorFactory[] secondaryFieldAccessEvalFactories;
 
+    protected IAsterixPropertiesProvider propertiesProvider;
+
     // Prevent public construction. Should be created via createIndexCreator().
-    protected SecondaryIndexCreator(PhysicalOptimizationConfig physOptConf) {
+    protected SecondaryIndexCreator(PhysicalOptimizationConfig physOptConf,
+            IAsterixPropertiesProvider propertiesProvider) {
         this.physOptConf = physOptConf;
+        this.propertiesProvider = propertiesProvider;
     }
 
     public static SecondaryIndexCreator createIndexCreator(CompiledCreateIndexStatement createIndexStmt,
             AqlMetadataProvider metadataProvider, PhysicalOptimizationConfig physOptConf) throws AsterixException,
             AlgebricksException {
+        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
         SecondaryIndexCreator indexCreator = null;
         switch (createIndexStmt.getIndexType()) {
             case BTREE: {
-                indexCreator = new SecondaryBTreeCreator(physOptConf);
+                indexCreator = new SecondaryBTreeCreator(physOptConf, asterixPropertiesProvider);
                 break;
             }
             case RTREE: {
-                indexCreator = new SecondaryRTreeCreator(physOptConf);
+                indexCreator = new SecondaryRTreeCreator(physOptConf, asterixPropertiesProvider);
                 break;
             }
             case WORD_INVIX:
             case NGRAM_INVIX:
             case FUZZY_WORD_INVIX:
             case FUZZY_NGRAM_INVIX: {
-                indexCreator = new SecondaryInvertedIndexCreator(physOptConf);
+                indexCreator = new SecondaryInvertedIndexCreator(physOptConf, asterixPropertiesProvider);
                 break;
             }
             default: {
@@ -266,6 +273,7 @@
         int[] lowKeyFields = null;
         // +Infinity
         int[] highKeyFields = null;
+        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
         BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
                 AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER, AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER,
                 primaryFileSplitProvider, primaryRecDesc.getTypeTraits(), primaryComparatorFactories,
@@ -273,8 +281,9 @@
                 new LSMBTreeDataflowHelperFactory(AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
-                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER, GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE,
-                        GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES), false, NoOpOperationCallbackFactory.INSTANCE);
+                        AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER, storageProperties
+                                .getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages()), false,
+                NoOpOperationCallbackFactory.INSTANCE);
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, primarySearchOp,
                 primaryPartitionConstraint);
         return primarySearchOp;
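
Callers still obtain secondary-index creators through the factory; only the construction path changes, with AsterixAppContextInfo.getInstance() injected as the IAsterixPropertiesProvider. Illustrative call site (createIndexStmt, metadataProvider and physOptConf assumed in scope):

    SecondaryIndexCreator creator = SecondaryIndexCreator.createIndexCreator(createIndexStmt,
            metadataProvider, physOptConf);
    JobSpecification creationSpec = creator.buildCreationJobSpec();
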
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryInvertedIndexCreator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryInvertedIndexCreator.java
index 0bf379d..7b53a28 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryInvertedIndexCreator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryInvertedIndexCreator.java
@@ -2,8 +2,9 @@
 
 import java.util.List;
 
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
 import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
 import edu.uci.ics.asterix.common.context.AsterixRuntimeComponentsProvider;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
@@ -60,8 +61,9 @@
     private RecordDescriptor tokenKeyPairRecDesc;
     private boolean isPartitioned;
 
-    protected SecondaryInvertedIndexCreator(PhysicalOptimizationConfig physOptConf) {
-        super(physOptConf);
+    protected SecondaryInvertedIndexCreator(PhysicalOptimizationConfig physOptConf,
+            IAsterixPropertiesProvider propertiesProvider) {
+        super(physOptConf, propertiesProvider);
     }
 
     @Override
@@ -150,10 +152,12 @@
     public JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException {
         JobSpecification spec = new JobSpecification();
 
+        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
         //prepare a LocalResourceMetadata which will be stored in NC's local resource repository
         ILocalResourceMetadata localResourceMetadata = new LSMInvertedIndexLocalResourceMetadata(invListsTypeTraits,
                 primaryComparatorFactories, tokenTypeTraits, tokenComparatorFactories, tokenizerFactory,
-                GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES, isPartitioned);
+                storageProperties.getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages(),
+                isPartitioned);
         ILocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
                 localResourceMetadata, LocalResource.LSMInvertedIndexResource);
 
@@ -259,20 +263,21 @@
     }
 
     private IIndexDataflowHelperFactory createDataflowHelperFactory() {
+        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
         if (!isPartitioned) {
             return new LSMInvertedIndexDataflowHelperFactory(
                     AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
                     AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
                     AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
                     AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
-                    GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES);
+                    storageProperties.getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages());
         } else {
             return new PartitionedLSMInvertedIndexDataflowHelperFactory(
                     AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
                     AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
                     AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
                     AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
-                    GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES);
+                    storageProperties.getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages());
         }
     }
 }
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryRTreeCreator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryRTreeCreator.java
index 6f400f9..1a5e0da 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryRTreeCreator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryRTreeCreator.java
@@ -2,7 +2,8 @@
 
 import java.util.List;
 
-import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
+import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
 import edu.uci.ics.asterix.common.context.AsterixRuntimeComponentsProvider;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
@@ -49,20 +50,22 @@
     protected int numNestedSecondaryKeyFields;
     protected ATypeTag keyType;
 
-    protected SecondaryRTreeCreator(PhysicalOptimizationConfig physOptConf) {
-        super(physOptConf);
+    protected SecondaryRTreeCreator(PhysicalOptimizationConfig physOptConf,
+            IAsterixPropertiesProvider propertiesProvider) {
+        super(physOptConf, propertiesProvider);
     }
 
     @Override
     public JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException {
         JobSpecification spec = new JobSpecification();
 
+        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
         //prepare a LocalResourceMetadata which will be stored in NC's local resource repository
         ILocalResourceMetadata localResourceMetadata = new LSMRTreeLocalResourceMetadata(
                 secondaryRecDesc.getTypeTraits(), secondaryComparatorFactories, primaryComparatorFactories,
                 valueProviderFactories, RTreePolicyType.RTREE, AqlMetadataProvider.proposeLinearizer(keyType,
-                        secondaryComparatorFactories.length), GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE,
-                GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES);
+                        secondaryComparatorFactories.length), storageProperties.getMemoryComponentPageSize(),
+                storageProperties.getMemoryComponentNumPages());
         ILocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
                 localResourceMetadata, LocalResource.LSMRTreeResource);
 
@@ -74,8 +77,8 @@
                         AsterixRuntimeComponentsProvider.LSMRTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMRTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMRTREE_PROVIDER, AqlMetadataProvider.proposeLinearizer(
-                                keyType, secondaryComparatorFactories.length),
-                        GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES),
+                                keyType, secondaryComparatorFactories.length), storageProperties
+                                .getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages()),
                 localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryIndexCreateOp,
                 secondaryPartitionConstraint);
@@ -149,6 +152,7 @@
             selectOp = createFilterNullsSelectOp(spec, numNestedSecondaryKeyFields);
         }
 
+        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
         // Create secondary RTree bulk load op.
         TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = createTreeIndexBulkLoadOp(
                 spec,
@@ -158,8 +162,8 @@
                         AsterixRuntimeComponentsProvider.LSMRTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMRTREE_PROVIDER,
                         AsterixRuntimeComponentsProvider.LSMRTREE_PROVIDER, AqlMetadataProvider.proposeLinearizer(
-                                keyType, secondaryComparatorFactories.length),
-                        GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES),
+                                keyType, secondaryComparatorFactories.length), storageProperties
+                                .getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages()),
                 BTree.DEFAULT_FILL_FACTOR);
 
         // Connect the operators.
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
index f50005a..6004ba4 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
@@ -13,11 +13,11 @@
 import edu.uci.ics.asterix.api.http.servlet.QueryResultAPIServlet;
 import edu.uci.ics.asterix.api.http.servlet.QueryStatusAPIServlet;
 import edu.uci.ics.asterix.api.http.servlet.UpdateAPIServlet;
-import edu.uci.ics.asterix.common.api.AsterixAppContextInfoImpl;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.api.AsterixAppContextInfo;
+import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
+import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
 import edu.uci.ics.asterix.metadata.MetadataManager;
 import edu.uci.ics.asterix.metadata.api.IAsterixStateProxy;
-import edu.uci.ics.asterix.metadata.bootstrap.AsterixProperties;
 import edu.uci.ics.asterix.metadata.bootstrap.AsterixStateProxy;
 import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
 import edu.uci.ics.hyracks.api.application.ICCApplicationEntryPoint;
@@ -29,10 +29,6 @@
 
     private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
 
-    private static final int DEFAULT_WEB_SERVER_PORT = 19001;
-
-    private static final int DEFAULT_JSON_API_SERVER_PORT = 19101;
-
     private Server webServer;
     private Server jsonAPIServer;
     private static IAsterixStateProxy proxy;
@@ -45,22 +41,22 @@
             LOGGER.info("Starting Asterix cluster controller");
         }
 
+        AsterixAppContextInfo.initialize(appCtx);
+
         proxy = AsterixStateProxy.registerRemoteObject();
-        proxy.setAsterixProperties(AsterixProperties.INSTANCE);
         appCtx.setDistributedState(proxy);
 
-        MetadataManager.INSTANCE = new MetadataManager(proxy);
+        AsterixMetadataProperties metadataProperties = AsterixAppContextInfo.getInstance().getMetadataProperties();
+        MetadataManager.INSTANCE = new MetadataManager(proxy, metadataProperties);
 
-        setupWebServer();
+        AsterixExternalProperties externalProperties = AsterixAppContextInfo.getInstance().getExternalProperties();
+        setupWebServer(externalProperties);
         webServer.start();
-        System.out.println("STARTED WEB SERVER");
 
-        // Setup and start the web interface
-        setupJSONAPIServer();
+        setupJSONAPIServer(externalProperties);
         jsonAPIServer.start();
-
-        AsterixAppContextInfoImpl.initialize(appCtx);
         ExternalLibraryBootstrap.setUpExternaLibraries(false);
+
     }
 
     @Override
@@ -80,13 +76,9 @@
         return new HyracksConnection(strIP, port);
     }
 
-    private void setupWebServer() throws Exception {
-        String portStr = System.getProperty(GlobalConfig.WEB_SERVER_PORT_PROPERTY);
-        int port = DEFAULT_WEB_SERVER_PORT;
-        if (portStr != null) {
-            port = Integer.parseInt(portStr);
-        }
-        webServer = new Server(port);
+    private void setupWebServer(AsterixExternalProperties externalProperties) throws Exception {
+
+        webServer = new Server(externalProperties.getWebInterfacePort());
 
         ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
         context.setContextPath("/");
@@ -98,13 +90,8 @@
         context.addServlet(new ServletHolder(new APIServlet()), "/*");
     }
 
-    private void setupJSONAPIServer() throws Exception {
-        String portStr = System.getProperty(GlobalConfig.JSON_API_SERVER_PORT_PROPERTY);
-        int port = DEFAULT_JSON_API_SERVER_PORT;
-        if (portStr != null) {
-            port = Integer.parseInt(portStr);
-        }
-        jsonAPIServer = new Server(port);
+    private void setupJSONAPIServer(AsterixExternalProperties externalProperties) throws Exception {
+        jsonAPIServer = new Server(externalProperties.getAPIServerPort());
 
         ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
         context.setContextPath("/");
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
index 25c617e..b4b8407 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
@@ -5,6 +5,7 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
 import edu.uci.ics.asterix.common.context.AsterixAppRuntimeContext;
 import edu.uci.ics.asterix.metadata.MetadataManager;
 import edu.uci.ics.asterix.metadata.MetadataNode;
@@ -44,17 +45,20 @@
         // #. recover if the system is corrupted by checking system state.
         IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
         systemState = recoveryMgr.getSystemState();
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("System state: " + systemState);
+        }
+
         if (systemState != SystemState.NEW_UNIVERSE) {
             PersistentLocalResourceRepository localResourceRepository = (PersistentLocalResourceRepository) runtimeContext
                     .getLocalResourceRepository();
             localResourceRepository.initialize(nodeId, null, false, runtimeContext.getResourceIdFactory());
         }
         if (systemState == SystemState.CORRUPTED) {
-            System.out.println("doing recovery");
             recoveryMgr.startRecovery(true);
         } else if (systemState == SystemState.NEW_UNIVERSE) {
             recoveryMgr.checkpoint(true);
-            System.out.println("new universe");
         }
     }
 
@@ -84,38 +88,40 @@
     @Override
     public void notifyStartupComplete() throws Exception {
         IAsterixStateProxy proxy = (IAsterixStateProxy) ncApplicationContext.getDistributedState();
+        AsterixMetadataProperties metadataProperties = runtimeContext.getMetadataProperties();
 
         if (systemState == SystemState.NEW_UNIVERSE) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("System state: " + SystemState.NEW_UNIVERSE);
+                LOGGER.info("Node ID: " + nodeId);
+                LOGGER.info("Stores: " + metadataProperties.getStores());
+                LOGGER.info("Root Metadata Store: " + metadataProperties.getStores().get(nodeId)[0]);
+            }
+
             PersistentLocalResourceRepository localResourceRepository = (PersistentLocalResourceRepository) runtimeContext
                     .getLocalResourceRepository();
-            System.out.println("nodeid" + nodeId);
-            System.out.println("proxy" + proxy);
-            System.out.println("stores" + proxy.getAsterixProperties().getStores());
-            System.out.println("store" + proxy.getAsterixProperties().getStores().get(nodeId)[0]);
-
-            localResourceRepository.initialize(nodeId, proxy.getAsterixProperties().getStores().get(nodeId)[0], true,
-                    null);
+            localResourceRepository.initialize(nodeId, metadataProperties.getStores().get(nodeId)[0], true, null);
         }
 
-        isMetadataNode = nodeId.equals(proxy.getAsterixProperties().getMetadataNodeName());
+        isMetadataNode = nodeId.equals(metadataProperties.getMetadataNodeName());
         if (isMetadataNode) {
             registerRemoteMetadataNode(proxy);
 
             if (LOGGER.isLoggable(Level.INFO)) {
                 LOGGER.info("Bootstrapping metadata");
             }
-            MetadataManager.INSTANCE = new MetadataManager(proxy);
+
+            MetadataManager.INSTANCE = new MetadataManager(proxy, metadataProperties);
             MetadataManager.INSTANCE.init();
-            MetadataBootstrap.startUniverse(proxy.getAsterixProperties(), ncApplicationContext,
+            MetadataBootstrap.startUniverse(runtimeContext, ncApplicationContext,
                     systemState == SystemState.NEW_UNIVERSE);
             MetadataBootstrap.startDDLRecovery();
         }
+
         ExternalLibraryBootstrap.setUpExternaLibraries(isMetadataNode);
-        System.out.println("Checked for installing libraries");
 
         IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
         recoveryMgr.checkpoint(true);
-        System.out.println("bootstrap complete");
 
         // TODO
         // reclaim storage for orphaned index artifacts in NCs.
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultReader.java b/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultReader.java
index 48bde1c..48ba0b1 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultReader.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultReader.java
@@ -19,7 +19,7 @@
 import edu.uci.ics.asterix.common.config.GlobalConfig;
 import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.dataset.DatasetJobRecord.Status;
 import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
 import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetReader;
 import edu.uci.ics.hyracks.api.dataset.ResultSetId;
diff --git a/asterix-app/src/main/resources/asterix-build-configuration.xml b/asterix-app/src/main/resources/asterix-build-configuration.xml
new file mode 100644
index 0000000..20ffb37
--- /dev/null
+++ b/asterix-app/src/main/resources/asterix-build-configuration.xml
@@ -0,0 +1,16 @@
+<asterixConfiguration xmlns="asterixconf">
+  <metadataNode>nc1</metadataNode>
+  <store>
+     <ncId>nc1</ncId>
+     <storeDirs>nc1data</storeDirs> 
+  </store>
+  <store>
+     <ncId>nc2</ncId>
+     <storeDirs>nc2data</storeDirs> 
+  </store>
+  <property>
+     <name>log.level</name>
+     <value>WARNING</value>
+     <description></description>
+  </property>
+</asterixConfiguration>
diff --git a/asterix-app/src/main/resources/asterix-metadata.properties b/asterix-app/src/main/resources/asterix-metadata.properties
deleted file mode 100644
index e9ccc63..0000000
--- a/asterix-app/src/main/resources/asterix-metadata.properties
+++ /dev/null
@@ -1,4 +0,0 @@
-MetadataNode=nc1
-NewUniverse=true
-nc1.stores=/tmp/nc1data/
-nc2.stores=/tmp/nc2data/, /tmp/nc2data1/
\ No newline at end of file
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
index 56259fa..dfbd305 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
@@ -14,23 +14,11 @@
  */
 package edu.uci.ics.asterix.test.metadata;
 
-import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
-import java.io.PrintWriter;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.List;
-import java.util.logging.Logger;
 
-import org.apache.commons.httpclient.DefaultHttpMethodRetryHandler;
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.HttpStatus;
-import org.apache.commons.httpclient.NameValuePair;
-import org.apache.commons.httpclient.methods.GetMethod;
-import org.apache.commons.httpclient.params.HttpMethodParams;
 import org.apache.commons.io.FileUtils;
-import org.json.JSONObject;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -42,8 +30,6 @@
 import edu.uci.ics.asterix.common.config.GlobalConfig;
 import edu.uci.ics.asterix.test.aql.TestsUtils;
 import edu.uci.ics.asterix.testframework.context.TestCaseContext;
-import edu.uci.ics.asterix.testframework.context.TestFileContext;
-import edu.uci.ics.asterix.testframework.xml.TestCase.CompilationUnit;
 
 /**
  * Executes the Metadata tests.
@@ -53,10 +39,9 @@
 
     private TestCaseContext tcCtx;
 
-    private static final Logger LOGGER = Logger.getLogger(MetadataTest.class.getName());
     private static final String PATH_ACTUAL = "mdtest/";
     private static final String PATH_BASE = "src/test/resources/metadata/";
-    private static final String TEST_CONFIG_FILE_NAME = "test.properties";
+    private static final String TEST_CONFIG_FILE_NAME = "asterix-build-configuration.xml";
     private static final String WEB_SERVER_PORT = "19002";
 
     @BeforeClass
@@ -109,184 +94,9 @@
         this.tcCtx = tcCtx;
     }
 
-    // Method that reads a DDL/Update/Query File
-    // and returns the contents as a string
-    // This string is later passed to REST API for execution.
-    public String readTestFile(File testFile) throws Exception {
-        BufferedReader reader = new BufferedReader(new FileReader(testFile));
-        String line = null;
-        StringBuilder stringBuilder = new StringBuilder();
-        String ls = System.getProperty("line.separator");
-
-        while ((line = reader.readLine()) != null) {
-            stringBuilder.append(line);
-            stringBuilder.append(ls);
-        }
-
-        return stringBuilder.toString();
-    }
-
-    // To execute DDL and Update statements
-    // create type statement
-    // create dataset statement
-    // create index statement
-    // create dataverse statement
-    // create function statement
-    public void executeDDL(String str) throws Exception {
-        final String url = "http://localhost:19101/ddl";
-
-        // Create an instance of HttpClient.
-        HttpClient client = new HttpClient();
-
-        // Create a method instance.
-        GetMethod method = new GetMethod(url);
-
-        method.setQueryString(new NameValuePair[] { new NameValuePair("ddl", str) });
-
-        // Provide custom retry handler is necessary
-        method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
-
-        // Execute the method.
-        int statusCode = client.executeMethod(method);
-
-        // Read the response body as String.
-        String responseBody = method.getResponseBodyAsString();
-
-        // Check if the method was executed successfully.
-        if (statusCode != HttpStatus.SC_OK) {
-            System.err.println("Method failed: " + method.getStatusLine());
-        }
-    }
-
-    // To execute Update statements
-    // Insert and Delete statements are executed here
-    public void executeUpdate(String str) throws Exception {
-        final String url = "http://localhost:19101/update";
-
-        // Create an instance of HttpClient.
-        HttpClient client = new HttpClient();
-
-        // Create a method instance.
-        GetMethod method = new GetMethod(url);
-
-        method.setQueryString(new NameValuePair[] { new NameValuePair("statements", str) });
-
-        // Provide custom retry handler is necessary
-        method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
-
-        // Execute the method.
-        int statusCode = client.executeMethod(method);
-
-        // Read the response body as String.
-        String responseBody = method.getResponseBodyAsString();
-
-        // Check if the method was executed successfully.
-        if (statusCode != HttpStatus.SC_OK) {
-            System.err.println("Method failed: " + method.getStatusLine());
-        }
-    }
-
-    // Executes Query and returns results as JSONArray
-    public JSONObject executeQuery(String str) throws Exception {
-
-        final String url = "http://localhost:19101/query";
-
-        // Create an instance of HttpClient.
-        HttpClient client = new HttpClient();
-
-        // Create a method instance.
-        GetMethod method = new GetMethod(url);
-
-        method.setQueryString(new NameValuePair[] { new NameValuePair("query", str) });
-
-        // Provide custom retry handler is necessary
-        method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
-
-        JSONObject result = null;
-
-        try {
-            // Execute the method.
-            int statusCode = client.executeMethod(method);
-
-            // Check if the method was executed successfully.
-            if (statusCode != HttpStatus.SC_OK) {
-                System.err.println("Method failed: " + method.getStatusLine());
-            }
-
-            // Read the response body as String.
-            String responseBody = method.getResponseBodyAsString();
-
-            result = new JSONObject(responseBody);
-        } catch (Exception e) {
-            System.out.println(e.getMessage());
-            e.printStackTrace();
-        }
-        return result;
-    }
-
     @Test
     public void test() throws Exception {
-        List<TestFileContext> testFileCtxs;
-        List<TestFileContext> expectedResultFileCtxs;
-
-        File testFile;
-        File expectedResultFile;
-        String statement;
-
-        int queryCount = 0;
-        JSONObject result;
-
-        List<CompilationUnit> cUnits = tcCtx.getTestCase().getCompilationUnit();
-        for (CompilationUnit cUnit : cUnits) {
-            testFileCtxs = tcCtx.getTestFiles(cUnit);
-            expectedResultFileCtxs = tcCtx.getExpectedResultFiles(cUnit);
-
-            for (TestFileContext ctx : testFileCtxs) {
-                testFile = ctx.getFile();
-                statement = readTestFile(testFile);
-                try {
-                    switch (ctx.getType()) {
-                        case "ddl":
-                            executeDDL(statement);
-                            break;
-                        case "update":
-                            executeUpdate(statement);
-                            break;
-                        case "query":
-                            result = executeQuery(statement);
-                            if (!cUnit.getExpectedError().isEmpty()) {
-                                if (!result.has("error")) {
-                                    throw new Exception("Test \"" + testFile + "\" FAILED!");
-                                }
-                            } else {
-                                expectedResultFile = expectedResultFileCtxs.get(queryCount).getFile();
-
-                                File actualFile = new File(PATH_ACTUAL + File.separator
-                                        + tcCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_"
-                                        + cUnit.getName() + ".adm");
-
-                                File actualResultFile = tcCtx.getActualResultFile(cUnit, new File(PATH_ACTUAL));
-                                actualResultFile.getParentFile().mkdirs();
-
-                                TestsUtils.writeResultsToFile(actualFile, result);
-
-                                TestsUtils.runScriptAndCompareWithResult(testFile, new PrintWriter(System.err),
-                                        expectedResultFile, actualFile);
-                            }
-                            queryCount++;
-                            break;
-                        default:
-                            throw new IllegalArgumentException("No statements of type " + ctx.getType());
-                    }
-                } catch (Exception e) {
-                    LOGGER.severe("Test \"" + testFile + "\" FAILED!");
-                    e.printStackTrace();
-                    if (cUnit.getExpectedError().isEmpty()) {
-                        throw new Exception("Test \"" + testFile + "\" FAILED!", e);
-                    }
-                }
-            }
-        }
+        TestsUtils.executeTest(PATH_ACTUAL, tcCtx);
     }
 
-}
+}
\ No newline at end of file
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java
index 8530ba1..0f1f8b5 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java
@@ -47,7 +47,7 @@
 
     private static final ArrayList<String> ignore = AsterixTestHelper.readFile(FILENAME_IGNORE, PATH_BASE);
     private static final ArrayList<String> only = AsterixTestHelper.readFile(FILENAME_ONLY, PATH_BASE);
-    private static final String TEST_CONFIG_FILE_NAME = "asterix-metadata.properties";
+    private static final String TEST_CONFIG_FILE_NAME = "asterix-build-configuration.xml";
 
     @BeforeClass
     public static void setUp() throws Exception {
@@ -58,12 +58,12 @@
         System.setProperty(GlobalConfig.WEB_SERVER_PORT_PROPERTY, "19002");
         File outdir = new File(PATH_ACTUAL);
         outdir.mkdirs();
-        
+
         File log = new File("asterix_logs");
         if (log.exists()) {
-            FileUtils.deleteDirectory(log); 
+            FileUtils.deleteDirectory(log);
         }
-        
+
         AsterixHyracksIntegrationUtil.init();
         // Set the node resolver to be the identity resolver that expects node names 
         // to be node controller ids; a valid assumption in test environment. 
@@ -79,10 +79,10 @@
         if (files == null || files.length == 0) {
             outdir.delete();
         }
-        
+
         File log = new File("asterix_logs");
         if (log.exists()) {
-            FileUtils.deleteDirectory(log); 
+            FileUtils.deleteDirectory(log);
         }
     }
 
@@ -138,7 +138,7 @@
             }
             Assume.assumeTrue(!skipped);
 
-            LOGGER.severe("RUN TEST: \"" + queryFile.getPath() + "\"");
+            LOGGER.info("RUN TEST: \"" + queryFile.getPath() + "\"");
             Reader query = new BufferedReader(new InputStreamReader(new FileInputStream(queryFile), "UTF-8"));
             PrintWriter plan = new PrintWriter(actualFile);
             AsterixJavaClient asterix = new AsterixJavaClient(
@@ -180,7 +180,7 @@
                     throw new Exception("Result for " + queryFile + " changed at line " + num + ":\n< \n> "
                             + lineActual);
                 }
-                LOGGER.severe("Test \"" + queryFile.getPath() + "\" PASSED!");
+                LOGGER.info("Test \"" + queryFile.getPath() + "\" PASSED!");
                 actualFile.delete();
             } finally {
                 readerExpected.close();
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
index 299c43c..2289484 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
@@ -1,22 +1,10 @@
 package edu.uci.ics.asterix.test.runtime;
 
-import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
-import java.io.PrintWriter;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.List;
-import java.util.logging.Logger;
 
-import org.apache.commons.httpclient.DefaultHttpMethodRetryHandler;
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.HttpStatus;
-import org.apache.commons.httpclient.NameValuePair;
-import org.apache.commons.httpclient.methods.GetMethod;
-import org.apache.commons.httpclient.params.HttpMethodParams;
 import org.apache.commons.io.FileUtils;
-import org.json.JSONObject;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -30,8 +18,6 @@
 import edu.uci.ics.asterix.external.util.IdentitiyResolverFactory;
 import edu.uci.ics.asterix.test.aql.TestsUtils;
 import edu.uci.ics.asterix.testframework.context.TestCaseContext;
-import edu.uci.ics.asterix.testframework.context.TestFileContext;
-import edu.uci.ics.asterix.testframework.xml.TestCase.CompilationUnit;
 
 /**
  * Runs the runtime test cases under 'asterix-app/src/test/resources/runtimets'.
@@ -41,13 +27,9 @@
     private static final String PATH_ACTUAL = "rttest/";
     private static final String PATH_BASE = "src/test/resources/runtimets/";
 
-    private static final String TEST_CONFIG_FILE_NAME = "test.properties";
+    private static final String TEST_CONFIG_FILE_NAME = "asterix-build-configuration.xml";
     private static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data", "nc2data" };
 
-    private static final Logger LOGGER = Logger.getLogger(ExecutionTest.class.getName());
-
-    // private static NCBootstrapImpl _bootstrap = new NCBootstrapImpl();
-
     @BeforeClass
     public static void setUp() throws Exception {
         System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
@@ -108,177 +90,8 @@
         this.tcCtx = tcCtx;
     }
 
-    // Method that reads a DDL/Update/Query File
-    // and returns the contents as a string
-    // This string is later passed to REST API for execution.
-    public String readTestFile(File testFile) throws Exception {
-        BufferedReader reader = new BufferedReader(new FileReader(testFile));
-        String line = null;
-        StringBuilder stringBuilder = new StringBuilder();
-        String ls = System.getProperty("line.separator");
-
-        while ((line = reader.readLine()) != null) {
-            stringBuilder.append(line);
-            stringBuilder.append(ls);
-        }
-
-        return stringBuilder.toString();
-    }
-
-    // To execute DDL and Update statements
-    // create type statement
-    // create dataset statement
-    // create index statement
-    // create dataverse statement
-    // create function statement
-    public void executeDDL(String str) throws Exception {
-        final String url = "http://localhost:19101/ddl";
-
-        // Create an instance of HttpClient.
-        HttpClient client = new HttpClient();
-
-        // Create a method instance.
-        GetMethod method = new GetMethod(url);
-
-        method.setQueryString(new NameValuePair[] { new NameValuePair("ddl", str) });
-
-        // Provide custom retry handler is necessary
-        method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
-
-        // Execute the method.
-        int statusCode = client.executeMethod(method);
-
-        // Check if the method was executed successfully.
-        if (statusCode != HttpStatus.SC_OK) {
-            System.err.println("Method failed: " + method.getStatusLine());
-        }
-    }
-
-    // To execute Update statements
-    // Insert and Delete statements are executed here
-    public void executeUpdate(String str) throws Exception {
-        final String url = "http://localhost:19101/update";
-
-        // Create an instance of HttpClient.
-        HttpClient client = new HttpClient();
-
-        // Create a method instance.
-        GetMethod method = new GetMethod(url);
-
-        method.setQueryString(new NameValuePair[] { new NameValuePair("statements", str) });
-
-        // Provide custom retry handler is necessary
-        method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
-
-        // Execute the method.
-        int statusCode = client.executeMethod(method);
-
-        // Check if the method was executed successfully.
-        if (statusCode != HttpStatus.SC_OK) {
-            System.err.println("Method failed: " + method.getStatusLine());
-        }
-    }
-
-    // Executes Query and returns results as JSONArray
-    public JSONObject executeQuery(String str) throws Exception {
-
-        final String url = "http://localhost:19101/query";
-
-        // Create an instance of HttpClient.
-        HttpClient client = new HttpClient();
-
-        // Create a method instance.
-        GetMethod method = new GetMethod(url);
-
-        method.setQueryString(new NameValuePair[] { new NameValuePair("query", str) });
-
-        // Provide custom retry handler is necessary
-        method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
-
-        JSONObject result = null;
-
-        try {
-            // Execute the method.
-            int statusCode = client.executeMethod(method);
-
-            // Check if the method was executed successfully.
-            if (statusCode != HttpStatus.SC_OK) {
-                System.err.println("Method failed: " + method.getStatusLine());
-            }
-
-            // Read the response body as String.
-            String responseBody = method.getResponseBodyAsString();
-
-            result = new JSONObject(responseBody);
-        } catch (Exception e) {
-            System.out.println(e.getMessage());
-            e.printStackTrace();
-        }
-        return result;
-    }
-
     @Test
     public void test() throws Exception {
-        List<TestFileContext> testFileCtxs;
-        List<TestFileContext> expectedResultFileCtxs;
-
-        File testFile;
-        File expectedResultFile;
-        String statement;
-
-        int queryCount = 0;
-        JSONObject result;
-
-        List<CompilationUnit> cUnits = tcCtx.getTestCase().getCompilationUnit();
-        for (CompilationUnit cUnit : cUnits) {
-            LOGGER.severe("[TEST]: " + tcCtx.getTestCase().getFilePath() + "/" + cUnit.getName());
-
-            testFileCtxs = tcCtx.getTestFiles(cUnit);
-            expectedResultFileCtxs = tcCtx.getExpectedResultFiles(cUnit);
-
-            for (TestFileContext ctx : testFileCtxs) {
-                testFile = ctx.getFile();
-                statement = readTestFile(testFile);
-                try {
-                    switch (ctx.getType()) {
-                        case "ddl":
-                            executeDDL(statement);
-                            break;
-                        case "update":
-                            executeUpdate(statement);
-                            break;
-                        case "query":
-                            result = executeQuery(statement);
-                            if (!cUnit.getExpectedError().isEmpty()) {
-                                if (!result.has("error")) {
-                                    throw new Exception("Test \"" + testFile + "\" FAILED!");
-                                }
-                            } else {
-                                expectedResultFile = expectedResultFileCtxs.get(queryCount).getFile();
-
-                                File actualFile = new File(PATH_ACTUAL + File.separator
-                                        + tcCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_"
-                                        + cUnit.getName() + ".adm");
-
-                                File actualResultFile = tcCtx.getActualResultFile(cUnit, new File(PATH_ACTUAL));
-                                actualResultFile.getParentFile().mkdirs();
-
-                                TestsUtils.writeResultsToFile(actualFile, result);
-
-                                TestsUtils.runScriptAndCompareWithResult(testFile, new PrintWriter(System.err),
-                                        expectedResultFile, actualFile);
-                            }
-                            queryCount++;
-                            break;
-                        default:
-                            throw new IllegalArgumentException("No statements of type " + ctx.getType());
-                    }
-                } catch (Exception e) {
-                    if (cUnit.getExpectedError().isEmpty()) {
-                        throw new Exception("Test \"" + testFile + "\" FAILED!", e);
-                    }
-                }
-            }
-        }
+        TestsUtils.executeTest(PATH_ACTUAL, tcCtx);
     }
 }
diff --git a/asterix-app/src/test/resources/AQLTS/queries/fieldAccessor.aql b/asterix-app/src/test/resources/AQLTS/queries/fieldAccessor.aql
new file mode 100644
index 0000000..3995374
--- /dev/null
+++ b/asterix-app/src/test/resources/AQLTS/queries/fieldAccessor.aql
@@ -0,0 +1,3 @@
+let $bla := { "name" : "value" }
+return
+  $bla."name" = $bla.name
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/AQLTS/queries/functionDecl3.aql b/asterix-app/src/test/resources/AQLTS/queries/functionDecl3.aql
new file mode 100644
index 0000000..80629be5
--- /dev/null
+++ b/asterix-app/src/test/resources/AQLTS/queries/functionDecl3.aql
@@ -0,0 +1,5 @@
+declare function "function with spaces"($a, $b) {
+  "string with spaces"
+};
+
+"function with spaces" (1, 2)
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/logging.properties b/asterix-app/src/test/resources/logging.properties
index 77da7cd..2935e13 100644
--- a/asterix-app/src/test/resources/logging.properties
+++ b/asterix-app/src/test/resources/logging.properties
@@ -60,8 +60,10 @@
 # For example, set the com.xyz.foo logger to only log SEVERE
 # messages:
 
+
+edu.uci.ics.asterix.test.level = INFO
 #edu.uci.ics.asterix.level = FINE
-#edu.uci.ics.algebricks.level = FINE
+#edu.uci.ics.hyracks.algebricks.level = FINE
 #edu.uci.ics.hyracks.level = INFO
 edu.uci.ics.asterix.test = INFO
 edu.uci.ics.asterix.installer.test = INFO
diff --git a/asterix-app/src/test/resources/optimizerts/queries/limit-issue353.aql b/asterix-app/src/test/resources/optimizerts/queries/limit-issue353.aql
new file mode 100644
index 0000000..f1b2678
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/limit-issue353.aql
@@ -0,0 +1,44 @@
+/*
+ * Description  : This test case verifies the fix for issue 353
+ *              : https://code.google.com/p/asterixdb/issues/detail?id=353
+ * Expected Res : Success
+ * Date         : 28th April 2013
+ */
+
+drop dataverse tpch if exists;
+create dataverse tpch;
+
+use dataverse tpch;
+
+create type LineItemType as closed {
+  l_orderkey: int32, 
+  l_partkey: int32, 
+  l_suppkey: int32, 
+  l_linenumber: int32, 
+  l_quantity: double, 
+  l_extendedprice: double,
+  l_discount: double, 
+  l_tax: double,
+  l_returnflag: string, 
+  l_linestatus: string, 
+  l_shipdate: string,
+  l_commitdate: string, 
+  l_receiptdate: string, 
+  l_shipinstruct: string, 
+  l_shipmode: string, 
+  l_comment: string
+}
+
+create dataset LineItem(LineItemType)
+  primary key l_orderkey, l_linenumber;
+  
+write output to nc1:"/tmp/push_limit.adm";
+
+for $l in dataset('LineItem')
+limit 2
+return {
+  "l_returnflag": $l.l_returnflag,
+  "l_linestatus": $l.l_linestatus,
+  "l_shipmode": $l.l_shipmode
+}
+
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-multipred.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-multipred.plan
index 5a5b715..e87dd88 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-multipred.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-multipred.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-neg_01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-neg_01.plan
index c81244a..8315f2b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-neg_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-neg_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- HYBRID_HASH_JOIN [$$7][$$10]  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-neg_02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-neg_02.plan
index abbed4a..6269a2b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-neg_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-neg_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- HYBRID_HASH_JOIN [$$9][$$8]  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_01.plan
index 0f3828c..c8fd4cb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_02.plan
index c1c1f33..be25081 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_03.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_03.plan
index 2ecd027..f847790 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_04.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_04.plan
index 2bc4b23..8e1be33 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_05.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_05.plan
index fa60146..d3134f2 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_05.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_05.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-ge-join_01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-ge-join_01.plan
index 0f0c06b..66ec32d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-ge-join_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-ge-join_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-ge-join_02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-ge-join_02.plan
index 5cb2903..28645d5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-ge-join_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-ge-join_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-gt-join_01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-gt-join_01.plan
index 0f0c06b..66ec32d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-gt-join_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-gt-join_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-gt-join_02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-gt-join_02.plan
index 5cb2903..28645d5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-gt-join_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-gt-join_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-le-join_01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-le-join_01.plan
index 0f0c06b..66ec32d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-le-join_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-le-join_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-le-join_02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-le-join_02.plan
index 5cb2903..28645d5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-le-join_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-le-join_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-lt-join_01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-lt-join_01.plan
index 0f0c06b..66ec32d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-lt-join_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-lt-join_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-lt-join_02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-lt-join_02.plan
index 5cb2903..28645d5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-lt-join_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-lt-join_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multiindex.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multiindex.plan
index be64ee5..a22212e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multiindex.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multiindex.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_SELECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multipred.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multipred.plan
index 96632a5..0438a5d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multipred.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multipred.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_01.plan
index fac45f2..77aba01 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_02.plan
index fac45f2..77aba01 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_03.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_03.plan
index fac45f2..77aba01 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-01.plan
index 426a99d..3d9e18f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-02.plan
index 426a99d..3d9e18f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-03.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-03.plan
index 426a99d..3d9e18f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-04.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-04.plan
index 426a99d..3d9e18f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-05.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-05.plan
index 426a99d..3d9e18f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-05.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-05.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-06.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-06.plan
index 426a99d..3d9e18f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-06.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-06.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-07.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-07.plan
index 426a99d..3d9e18f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-07.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-07.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-08.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-08.plan
index 50b1c04..6f13ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-08.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-08.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-09.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-09.plan
index a78e43f..011cf66 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-09.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-09.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-10.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-10.plan
index a78e43f..011cf66 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-10.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-10.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-11.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-11.plan
index a78e43f..011cf66 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-11.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-11.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-12.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-12.plan
index a78e43f..011cf66 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-12.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-12.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-13.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-13.plan
index a78e43f..011cf66 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-13.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-13.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-14.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-14.plan
index a78e43f..011cf66 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-14.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-14.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-15.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-15.plan
index 50b1c04..6f13ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-15.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-15.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-16.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-16.plan
index 50b1c04..6f13ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-16.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-16.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-17.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-17.plan
index a78e43f..011cf66 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-17.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-17.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-18.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-18.plan
index a78e43f..011cf66 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-18.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-18.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-19.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-19.plan
index 50b1c04..6f13ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-19.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-19.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-20.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-20.plan
index 50b1c04..6f13ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-20.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-20.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-21.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-21.plan
index 50b1c04..6f13ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-21.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-21.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-22.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-22.plan
index 50b1c04..6f13ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-22.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-22.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-23.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-23.plan
index 50b1c04..6f13ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-23.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-23.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-24.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-24.plan
index 50b1c04..6f13ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-24.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-24.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-25.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-25.plan
index 50b1c04..6f13ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-25.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-25.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-26.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-26.plan
index 50b1c04..6f13ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-26.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-26.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- BTREE_SEARCH  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-27.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-27.plan
index 40853fa..632a813 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-27.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-27.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-28.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-28.plan
index 5021595..d85d357 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-28.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-28.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-29.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-29.plan
index 5021595..d85d357 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-29.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-29.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-30.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-30.plan
index 5021595..d85d357 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-30.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-30.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-31.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-31.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-31.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-31.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-32.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-32.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-32.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-32.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-33.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-33.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-33.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-33.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-34.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-34.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-34.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-34.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-35.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-35.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-35.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-35.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-36.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-36.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-36.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-36.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-37.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-37.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-37.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-37.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-38.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-38.plan
index 2110116..ef8a923 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-38.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-38.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-39.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-39.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-39.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-39.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-40.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-40.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-40.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-40.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-41.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-41.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-41.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-41.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-42.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-42.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-42.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-42.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-43.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-43.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-43.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-43.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-44.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-44.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-44.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-44.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-45.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-45.plan
index 7087a22..4b4412e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-45.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-45.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-46.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-46.plan
index 7087a22..4b4412e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-46.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-46.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-47.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-47.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-47.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-47.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-48.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-48.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-48.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-48.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-49.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-49.plan
index 7087a22..4b4412e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-49.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-49.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-50.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-50.plan
index 7087a22..4b4412e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-50.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-50.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-51.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-51.plan
index 7087a22..4b4412e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-51.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-51.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-52.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-52.plan
index a48d23a..001e10b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-52.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-52.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-53.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-53.plan
index a48d23a..001e10b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-53.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-53.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-54.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-54.plan
index a48d23a..001e10b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-54.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-54.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-55.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-55.plan
index a48d23a..001e10b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-55.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-55.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-56.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-56.plan
index f18d0ad..e12cdd8 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-56.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-56.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-57.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-57.plan
index f18d0ad..e12cdd8 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-57.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-57.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-58.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-58.plan
index 5b0ae2a..d7667d6 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-58.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-58.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-59.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-59.plan
index 7087a22..4b4412e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-59.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-59.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-complex.plan b/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-complex.plan
index 8d2b4d8..10b5853 100644
--- a/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-complex.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-complex.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-simple.plan b/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-simple.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-simple.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-simple.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/count-tweets.plan b/asterix-app/src/test/resources/optimizerts/results/count-tweets.plan
index f24833e..600795f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/count-tweets.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/count-tweets.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/cust_group_no_agg.plan b/asterix-app/src/test/resources/optimizerts/results/cust_group_no_agg.plan
index a1242ff..ab9747f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/cust_group_no_agg.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/cust_group_no_agg.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- SORT_MERGE_EXCHANGE [$$1(ASC) ]  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- PRE_CLUSTERED_GROUP_BY[$$6]  |PARTITIONED|
             {
               -- AGGREGATE  |LOCAL|
diff --git a/asterix-app/src/test/resources/optimizerts/results/denorm-cust-order.plan b/asterix-app/src/test/resources/optimizerts/results/denorm-cust-order.plan
index 80f0a9d..86bafc3 100644
--- a/asterix-app/src/test/resources/optimizerts/results/denorm-cust-order.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/denorm-cust-order.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan b/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan
index 0b44c4d..494b208 100644
--- a/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check-panic.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check-panic.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check-panic.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check-panic.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check.plan
index 7aa19a6..d7a4c06 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-panic.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-panic.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-panic.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-panic.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance.plan
index 7aa19a6..d7a4c06 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-edit-distance.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-edit-distance.plan
index b5ea9d7..021f810 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-edit-distance.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-edit-distance.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-jaccard.plan
index 504d5bf..aec97d2 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard-check.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard-check.plan
index f0adce8..2a2ccc4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard-check.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard-check.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard.plan
index f0adce8..2a2ccc4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-fuzzyeq-jaccard.plan
index 7aa19a6..d7a4c06 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-fuzzyeq-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard-check.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard-check.plan
index 504d5bf..aec97d2 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard-check.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard-check.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard.plan
index 504d5bf..aec97d2 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-fuzzyeq-jaccard.plan
index 7aa19a6..d7a4c06 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-fuzzyeq-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard-check.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard-check.plan
index 504d5bf..aec97d2 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard-check.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard-check.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard.plan
index 504d5bf..aec97d2 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-fuzzyeq-jaccard.plan
index 504d5bf..aec97d2 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-fuzzyeq-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard-check.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard-check.plan
index f0adce8..2a2ccc4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard-check.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard-check.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard.plan
index f0adce8..2a2ccc4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.plan
index 96697b6..3028f9a 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.plan
index 96697b6..3028f9a 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- STREAM_SELECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let.plan
index 504d5bf..aec97d2 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-let.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-let.plan
index 7ae3ecd..78e761f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-let.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-let.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-multi-let.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-multi-let.plan
index db4c5c5..57730fe 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-multi-let.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-multi-let.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_SELECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-jaccard-check-let.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-jaccard-check-let.plan
index f0adce8..2a2ccc4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-jaccard-check-let.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-jaccard-check-let.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ulist-jaccard-check-let.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ulist-jaccard-check-let.plan
index f0adce8..2a2ccc4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ulist-jaccard-check-let.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ulist-jaccard-check-let.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-let.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-let.plan
index 7ae3ecd..78e761f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-let.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-let.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-multi-let.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-multi-let.plan
index db4c5c5..57730fe 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-multi-let.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-multi-let.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_SELECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance-inline.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance-inline.plan
index 73ba563..0fab288 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance-inline.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance-inline.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- UNION_ALL  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance.plan
index 37d4f2c..f7d3cb4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- UNION_ALL  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.plan
index 22cb67b..1be3fb1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- UNION_ALL  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.plan
index df958f7..7c74f83 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard-inline.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard-inline.plan
index 50966d0..c5534d6 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard-inline.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard-inline.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard.plan
index 0bb698c..fdbf50f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance-inline.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance-inline.plan
index 73ba563..0fab288 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance-inline.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance-inline.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- UNION_ALL  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance.plan
index 37d4f2c..f7d3cb4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- UNION_ALL  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.plan
index 22cb67b..1be3fb1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- UNION_ALL  |PARTITIONED|
       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.plan
index 294d740..88c4469 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard-inline.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard-inline.plan
index 141fd9d..8b6b08b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard-inline.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard-inline.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard.plan
index a70fe23..ba9879d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.plan
index 294d740..88c4469 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard-inline.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard-inline.plan
index 141fd9d..8b6b08b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard-inline.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard-inline.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard.plan
index a70fe23..ba9879d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.plan
index df958f7..7c74f83 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard-inline.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard-inline.plan
index 50966d0..c5534d6 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard-inline.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard-inline.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard.plan
index 0bb698c..fdbf50f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_01.plan
index 63c5436..a04e378 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_02.plan
index 2181d95..1800e75 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_03.plan
index 63c5436..a04e378 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_04.plan
index dc42118..abd2c39 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_01.plan
index 63c5436..a04e378 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_02.plan
index 2181d95..1800e75 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_03.plan
index 63c5436..a04e378 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_04.plan
index 43dbff9..dffe718 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_01.plan
index 25c87b0..65be14f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_02.plan
index 98aa0ef..bfee1a5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_03.plan
index 98aa0ef..bfee1a5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_01.plan
index 700ebb7..ab5cf83 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_02.plan
index 69948aa..7877b30 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_03.plan
index 700ebb7..ab5cf83 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_01.plan
index 901e0f4..3f7ecb1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_02.plan
index d23a6f5..4b767ab 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_03.plan
index 901e0f4..3f7ecb1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_04.plan
index 1d1cd5f..77b00ff 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_01.plan
index 901e0f4..3f7ecb1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_02.plan
index d23a6f5..4b767ab 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_03.plan
index 901e0f4..3f7ecb1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_04.plan
index d1b8f79..32d7e09 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_01.plan
index 63c5436..a04e378 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_02.plan
index 2181d95..1800e75 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_03.plan
index 63c5436..a04e378 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_04.plan
index dc42118..abd2c39 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_01.plan
index 63c5436..a04e378 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_02.plan
index 2181d95..1800e75 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_03.plan
index 63c5436..a04e378 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_04.plan
index 43dbff9..dffe718 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_01.plan
index 98aa0ef..bfee1a5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_02.plan
index 25c87b0..65be14f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_03.plan
index 98aa0ef..bfee1a5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_01.plan
index 4cef1ae..2a4e9de 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_02.plan
index a21a7b5..06826c1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_03.plan
index 4cef1ae..2a4e9de 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_01.plan
index 44328ef..7c16cef 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_02.plan
index 6d8460a..3d0f3f1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_03.plan
index 44328ef..7c16cef 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_04.plan
index 4a4430c..156dd43 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_01.plan
index 44328ef..7c16cef 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_02.plan
index 6d8460a..3d0f3f1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_03.plan
index 44328ef..7c16cef 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_04.plan
index b6a694e..47f5c91 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_01.plan
index 4cef1ae..2a4e9de 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_02.plan
index a21a7b5..06826c1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_03.plan
index 4cef1ae..2a4e9de 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_01.plan
index 44328ef..7c16cef 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_02.plan
index 6d8460a..3d0f3f1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_03.plan
index 44328ef..7c16cef 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_04.plan
index 4a4430c..156dd43 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_01.plan
index 44328ef..7c16cef 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_02.plan
index 6d8460a..3d0f3f1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_03.plan
index 44328ef..7c16cef 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_04.plan
index b6a694e..47f5c91 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_01.plan
index 700ebb7..ab5cf83 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_02.plan
index 69948aa..7877b30 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_03.plan
index 700ebb7..ab5cf83 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_01.plan
index 901e0f4..3f7ecb1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_02.plan
index b77169d..0fced26 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_03.plan
index 901e0f4..3f7ecb1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_04.plan
index 1d1cd5f..77b00ff 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_01.plan
index 901e0f4..3f7ecb1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_02.plan
index d23a6f5..4b767ab 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_03.plan
index 901e0f4..3f7ecb1 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_04.plan
index d1b8f79..32d7e09 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/join-super-key_01.plan b/asterix-app/src/test/resources/optimizerts/results/join-super-key_01.plan
index 011f8be..8149181 100644
--- a/asterix-app/src/test/resources/optimizerts/results/join-super-key_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/join-super-key_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/join-super-key_02.plan b/asterix-app/src/test/resources/optimizerts/results/join-super-key_02.plan
index 591ddbf..92adb3d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/join-super-key_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/join-super-key_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/limit-issue353.plan b/asterix-app/src/test/resources/optimizerts/results/limit-issue353.plan
new file mode 100644
index 0000000..829a245
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/limit-issue353.plan
@@ -0,0 +1,12 @@
+-- DISTRIBUTE_RESULT  |UNPARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |UNPARTITIONED|
+    -- STREAM_PROJECT  |UNPARTITIONED|
+      -- ASSIGN  |UNPARTITIONED|
+        -- STREAM_LIMIT  |UNPARTITIONED|
+          -- STREAM_PROJECT  |PARTITIONED|
+            -- SORT_MERGE_EXCHANGE [$$6(ASC), $$7(ASC) ]  |PARTITIONED|
+              -- STREAM_LIMIT  |PARTITIONED|
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  -- DATASOURCE_SCAN  |PARTITIONED|
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/loj-super-key_01.plan b/asterix-app/src/test/resources/optimizerts/results/loj-super-key_01.plan
index 7dac263..60e9899 100644
--- a/asterix-app/src/test/resources/optimizerts/results/loj-super-key_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/loj-super-key_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/loj-super-key_02.plan b/asterix-app/src/test/resources/optimizerts/results/loj-super-key_02.plan
index 63a3b4e..78159b2 100644
--- a/asterix-app/src/test/resources/optimizerts/results/loj-super-key_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/loj-super-key_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/nested_loj2.plan b/asterix-app/src/test/resources/optimizerts/results/nested_loj2.plan
index f0e1d55..41ae699 100644
--- a/asterix-app/src/test/resources/optimizerts/results/nested_loj2.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/nested_loj2.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/nested_loj3.plan b/asterix-app/src/test/resources/optimizerts/results/nested_loj3.plan
index b68ef76..6d0e32e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/nested_loj3.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/nested_loj3.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/noncollocated.plan b/asterix-app/src/test/resources/optimizerts/results/noncollocated.plan
index f1e1f5d..e92a84c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/noncollocated.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/noncollocated.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/orders-aggreg.plan b/asterix-app/src/test/resources/optimizerts/results/orders-aggreg.plan
index f32df8e..08ad861 100644
--- a/asterix-app/src/test/resources/optimizerts/results/orders-aggreg.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/orders-aggreg.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/orders-composite-index-search.plan b/asterix-app/src/test/resources/optimizerts/results/orders-composite-index-search.plan
index 089064b..39a9eda 100644
--- a/asterix-app/src/test/resources/optimizerts/results/orders-composite-index-search.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/orders-composite-index-search.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_SELECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/orders-index-search-open.plan b/asterix-app/src/test/resources/optimizerts/results/orders-index-search-open.plan
index 66b693c..b78849b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/orders-index-search-open.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/orders-index-search-open.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_SELECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/orders-index-search.plan b/asterix-app/src/test/resources/optimizerts/results/orders-index-search.plan
index 66b693c..b78849b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/orders-index-search.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/orders-index-search.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_SELECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/prim-idx-search-open.plan b/asterix-app/src/test/resources/optimizerts/results/prim-idx-search-open.plan
index e8b196d..2a91fc5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/prim-idx-search-open.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/prim-idx-search-open.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/prim-idx-search.plan b/asterix-app/src/test/resources/optimizerts/results/prim-idx-search.plan
index e8b196d..2a91fc5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/prim-idx-search.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/prim-idx-search.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/pull_select_above_eq_join.plan b/asterix-app/src/test/resources/optimizerts/results/pull_select_above_eq_join.plan
index d50a885..b0d8e64 100644
--- a/asterix-app/src/test/resources/optimizerts/results/pull_select_above_eq_join.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/pull_select_above_eq_join.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/push-project-through-group.plan b/asterix-app/src/test/resources/optimizerts/results/push-project-through-group.plan
index 94273ad..86bcdac 100644
--- a/asterix-app/src/test/resources/optimizerts/results/push-project-through-group.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/push-project-through-group.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/q1.plan b/asterix-app/src/test/resources/optimizerts/results/q1.plan
index 2695827..aa5daa2 100644
--- a/asterix-app/src/test/resources/optimizerts/results/q1.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/q1.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_01.plan b/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_01.plan
index 2f98801..64fdc8c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_02.plan b/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_02.plan
index 2f98801..64fdc8c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_03.plan b/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_03.plan
index 2f98801..64fdc8c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index-open.plan b/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index-open.plan
index f7382d8..1845ed7 100644
--- a/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index-open.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index-open.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index.plan b/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index.plan
index f7382d8..1845ed7 100644
--- a/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_PROJECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_01.plan
index c0e93c8..fcd1fd5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_SELECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_02.plan
index c0e93c8..fcd1fd5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_SELECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_03.plan
index c0e93c8..fcd1fd5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_SELECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_04.plan
index c0e93c8..fcd1fd5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_SELECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_05.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_05.plan
index 4d9d3f9..7ec3440 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_05.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_05.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_06.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_06.plan
index 4d9d3f9..7ec3440 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_06.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_06.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_07.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_07.plan
index 4d9d3f9..7ec3440 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_07.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_07.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_08.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_08.plan
index 4d9d3f9..7ec3440 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_08.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_08.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_01.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_02.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_03.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_04.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_05.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_05.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_05.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_05.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_06.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_06.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_06.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_06.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_07.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_07.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_07.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_07.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_08.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_08.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_08.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_08.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-edit-distance-check.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-edit-distance-check.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-edit-distance-check.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-edit-distance-check.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-jaccard-check.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-jaccard-check.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-jaccard-check.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-jaccard-check.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_01.plan
index c0e93c8..fcd1fd5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_SELECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_02.plan
index c0e93c8..fcd1fd5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_SELECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_03.plan
index c0e93c8..fcd1fd5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_SELECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_04.plan
index c0e93c8..fcd1fd5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- STREAM_SELECT  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_05.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_05.plan
index 4d9d3f9..7ec3440 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_05.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_05.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_06.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_06.plan
index 4d9d3f9..7ec3440 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_06.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_06.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_07.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_07.plan
index 4d9d3f9..7ec3440 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_07.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_07.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_08.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_08.plan
index 4d9d3f9..7ec3440 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_08.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_08.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ASSIGN  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_01.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_01.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_02.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_02.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_03.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_03.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_04.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_04.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_05.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_05.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_05.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_05.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_06.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_06.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_06.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_06.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_07.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_07.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_07.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_07.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_08.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_08.plan
index ae0d5bb..06194e4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_08.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_08.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_SELECT  |PARTITIONED|
       -- STREAM_PROJECT  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/unnest_list_in_subplan.plan b/asterix-app/src/test/resources/optimizerts/results/unnest_list_in_subplan.plan
index 30a53b3..e9fafd7 100644
--- a/asterix-app/src/test/resources/optimizerts/results/unnest_list_in_subplan.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/unnest_list_in_subplan.plan
@@ -1,5 +1,5 @@
 -- DISTRIBUTE_RESULT  |PARTITIONED|
-  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
     -- STREAM_PROJECT  |PARTITIONED|
       -- ASSIGN  |PARTITIONED|
         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/issue395/issue395.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/issue395/issue395.1.ddl.aql
new file mode 100644
index 0000000..c0bbb1f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/issue395/issue395.1.ddl.aql
@@ -0,0 +1,10 @@
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type Emp as open {
+id:int32,
+name:string ?
+}
+
+create dataset Employee(Emp) primary key id;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/issue395/issue395.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/issue395/issue395.2.update.aql
new file mode 100644
index 0000000..fa8fb09
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/issue395/issue395.2.update.aql
@@ -0,0 +1,6 @@
+use dataverse test;
+
+insert into dataset Employee({"id":12,"name":"John Doe"});
+insert into dataset Employee({"id":42});
+insert into dataset Employee({"id":22,"name":"John Smith"});
+insert into dataset Employee({"id":19});
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/issue395/issue395.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/issue395/issue395.3.query.aql
new file mode 100644
index 0000000..8104319
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/issue395/issue395.3.query.aql
@@ -0,0 +1,4 @@
+use dataverse test;
+
+count(for $l in dataset Employee
+return $l.name)
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/issue412_0/issue412_0.1.ddl.aql
similarity index 100%
rename from asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.1.ddl.aql
rename to asterix-app/src/test/resources/runtimets/queries/aggregate/issue412_0/issue412_0.1.ddl.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/issue412_0/issue412_0.2.update.aql
similarity index 100%
rename from asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.2.update.aql
rename to asterix-app/src/test/resources/runtimets/queries/aggregate/issue412_0/issue412_0.2.update.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/issue412_0/issue412_0.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/issue412_0/issue412_0.3.query.aql
new file mode 100644
index 0000000..9b0ed8a
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/issue412_0/issue412_0.3.query.aql
@@ -0,0 +1,2 @@
+let $l := ["ASTERIX", "Hyracks", null]
+return count($l)
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/issue412_1/issue412_1.1.ddl.aql
similarity index 100%
copy from asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.1.ddl.aql
copy to asterix-app/src/test/resources/runtimets/queries/aggregate/issue412_1/issue412_1.1.ddl.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/issue412_1/issue412_1.2.update.aql
similarity index 100%
copy from asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.2.update.aql
copy to asterix-app/src/test/resources/runtimets/queries/aggregate/issue412_1/issue412_1.2.update.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/issue412_1/issue412_1.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/issue412_1/issue412_1.3.query.aql
new file mode 100644
index 0000000..f9bc64e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/issue412_1/issue412_1.3.query.aql
@@ -0,0 +1,2 @@
+let $l := [1, 60, null]
+return { "count": count($l), "average": avg($l), "sum": sum($l), "min": min($l), "max": max($l) } 
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/query-issue400/query-issue400.1.ddl.aql
similarity index 100%
copy from asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.1.ddl.aql
copy to asterix-app/src/test/resources/runtimets/queries/aggregate/query-issue400/query-issue400.1.ddl.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/query-issue400/query-issue400.2.update.aql
similarity index 100%
copy from asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.2.update.aql
copy to asterix-app/src/test/resources/runtimets/queries/aggregate/query-issue400/query-issue400.2.update.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/query-issue400/query-issue400.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/query-issue400/query-issue400.3.query.aql
new file mode 100644
index 0000000..9819455
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/query-issue400/query-issue400.3.query.aql
@@ -0,0 +1,9 @@
+/*
+ * Description  : This test case is to verify the fix for issue400
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=400
+ * Expected Res : Success
+ * Date         : 8th May 2013
+ */
+ 
+let $l := [[1,2,3,4,5],[6,7,8,9]]
+return count(for $i in $l return $i)
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/join_q_04/join_q_04.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/custord/join_q_04/join_q_04.1.ddl.aql
index 2fbbf2f..882fecb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/join_q_04/join_q_04.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/join_q_04/join_q_04.1.ddl.aql
@@ -1,3 +1,10 @@
+/*
+ * Description  : This test case is to verify the fix for issue51
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=51
+ * Expected Res : SUCCESS
+ * Date         : 14th May 2013
+ */
+
 drop dataverse test if exists;
 
 create dataverse test;
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/join_q_04/join_q_04.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/custord/join_q_04/join_q_04.2.update.aql
index e69de29..45fba70 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/join_q_04/join_q_04.2.update.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/join_q_04/join_q_04.2.update.aql
@@ -0,0 +1,6 @@
+/*
+ * Description  : This test case is to verify the fix for issue51
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=51
+ * Expected Res : SUCCESS
+ * Date         : 14th May 2013
+ */
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/join_q_04/join_q_04.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/custord/join_q_04/join_q_04.3.query.aql
index 9996053..9781eb5 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/join_q_04/join_q_04.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/join_q_04/join_q_04.3.query.aql
@@ -1,7 +1,19 @@
+/*
+ * Description  : This test case is to verify the fix for issue51
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=51
+ * Expected Res : SUCCESS
+ * Date         : 14th May 2013
+ */
+
 use dataverse test;
 
-for $c in dataset('Customers')
-return {"order_id" :
-for $o in dataset('Orders')
-where $c.cid = $o.cid
-return $o.oid } 
+for $c in dataset Customers
+order by $c.name
+return {
+		"cust_name": $c.name,
+		"order_ids":
+			for $o in dataset Orders
+				where $c.cid = $o.cid 
+			order by $o.oid
+			return $o.oid 
+} 
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/query-issue382/query-issue382.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue382/query-issue382.1.ddl.aql
new file mode 100644
index 0000000..d09e16d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue382/query-issue382.1.ddl.aql
@@ -0,0 +1,26 @@
+create dataverse SocialNetworkData;
+
+use dataverse SocialNetworkData;
+
+create type EmploymentType as closed {
+organization-name: string,
+start-date: date,
+end-date: date?
+}
+
+create type FacebookUserType as closed {
+id: int32,
+id-copy: int32,
+alias: string,
+name: string,
+user-since: datetime,
+user-since-copy: datetime,
+friend-ids: {{ int32 }},
+employment: [EmploymentType]
+}
+
+create dataset FacebookUsers(FacebookUserType)
+primary key id;
+
+create dataset HandbookUsers(FacebookUserType)
+primary key id;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/query-issue382/query-issue382.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue382/query-issue382.2.update.aql
new file mode 100644
index 0000000..72f89e2
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue382/query-issue382.2.update.aql
@@ -0,0 +1,10 @@
+use dataverse SocialNetworkData;
+
+load dataset FacebookUsers using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/fbu-dml-insert-shuffled.adm"),("format"="adm"));
+
+insert into dataset HandbookUsers (
+for $x in dataset FacebookUsers
+where $x.id < 10307032
+return $x
+)
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/query-issue382/query-issue382.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue382/query-issue382.3.query.aql
new file mode 100644
index 0000000..4a2f1ff
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue382/query-issue382.3.query.aql
@@ -0,0 +1,3 @@
+use dataverse SocialNetworkData;
+
+count(for $h in dataset HandbookUsers return $h);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/query-issue433/query-issue433.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue433/query-issue433.1.ddl.aql
new file mode 100644
index 0000000..e7ab9e8
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue433/query-issue433.1.ddl.aql
@@ -0,0 +1,23 @@
+/*
+ * Description  : This test case is to verify the fix for issue433
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=433
+ * Expected Res : Success
+ * Date         : 3rd April 2013
+ */
+
+create dataverse insertIssue;
+use dataverse insertIssue;
+
+create type subElem as closed {
+n: string,
+e: int32?
+}
+
+create type elem as closed {
+id: int32,
+name: string,
+sub: [subElem]
+}
+
+create dataset myDataset(elem)
+primary key id;
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/query-issue433/query-issue433.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue433/query-issue433.2.update.aql
new file mode 100644
index 0000000..d06b4f5
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue433/query-issue433.2.update.aql
@@ -0,0 +1,13 @@
+/*
+ * Description  : This test case is to verify the fix for issue433
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=433
+ * Expected Res : Success
+ * Date         : 3rd April 2013
+ */
+
+use dataverse insertIssue;
+
+insert into dataset myDataset (
+for $t in [ {"id":1, "name":"u1","sub":[{"n":"se1","e":100}]}, {"id":2, "name":"u2","sub":[{"n":"se2","e":200}]} ]
+return $t
+);
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/query-issue433/query-issue433.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue433/query-issue433.3.query.aql
new file mode 100644
index 0000000..b863cdf
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue433/query-issue433.3.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : This test case is to verify the fix for issue433
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=433
+ * Expected Res : Success
+ * Date         : 3rd April 2013
+ */
+
+use dataverse insertIssue;
+
+for $d in dataset myDataset
+order by $d.id
+return $d;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree.aql
deleted file mode 100644
index 85616e4..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree.aql
+++ /dev/null
@@ -1,30 +0,0 @@
-drop dataverse test if exists;
-  
-create dataverse test;
-use dataverse test;
-
-create type MyRecord as closed {
-	id: int32,
-	tweetid: int64,
-	loc: point,
-	time: datetime,
-	text: string
-}
-
-create dataset MyData(MyRecord)
-  primary key id;
-
-create index rtree_index on MyData(loc) type rtree;
-
-load dataset MyData 
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/twitter/smalltweets.txt"),("format"="adm")) pre-sorted;
-
-delete $l from dataset MyData where spatial-intersect($l.loc, create-rectangle(create-point(0.0,-100.0), create-point(55.5,50.0))) die after 1000;
-
-write output to nc1:"rttest/failure_delete-rtree.adm";      
-
-for $o in dataset('MyData')
-where spatial-intersect($o.loc, create-rectangle(create-point(0.0,-100.0), create-point(55.5,50.0)))
-order by $o.id
-return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.1.ddl.aql
deleted file mode 100644
index 381ad3f..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.1.ddl.aql
+++ /dev/null
@@ -1,19 +0,0 @@
-drop dataverse test if exists;
-  
-create dataverse test;
-
-use dataverse test;
-
-create type MyRecord as closed {
-	id: int32,
-	tweetid: int64,
-	loc: point,
-	time: datetime,
-	text: string
-}
-
-create dataset MyData(MyRecord)
-  primary key id;
-  
-create index rtree_index_loc on MyData(loc) type rtree;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.2.update.aql
deleted file mode 100644
index 3556905..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.2.update.aql
+++ /dev/null
@@ -1,8 +0,0 @@
-use dataverse test;
-
-load dataset MyData 
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/twitter/smalltweets.txt"),("format"="adm")) pre-sorted;
-
-delete $l from dataset MyData where $l.id>=50 die after 1500;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.3.query.aql
deleted file mode 100644
index b33019b..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.3.query.aql
+++ /dev/null
@@ -1,5 +0,0 @@
-use dataverse test;
-
-for $o in dataset('MyData')
-order by $o.id
-return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete.aql
deleted file mode 100644
index 125fd99..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete.aql
+++ /dev/null
@@ -1,38 +0,0 @@
-drop dataverse test if exists;
-  
-create dataverse test;
-use dataverse test;
-
-create type LineItemType as closed {
-  l_orderkey: int32, 
-  l_partkey: int32, 
-  l_suppkey: int32, 
-  l_linenumber: int32, 
-  l_quantity: int32, 
-  l_extendedprice: double,
-  l_discount: double, 
-  l_tax: double,
-  l_returnflag: string, 
-  l_linestatus: string, 
-  l_shipdate: string,
-  l_commitdate: string, 
-  l_receiptdate: string, 
-  l_shipinstruct: string, 
-  l_shipmode: string, 
-  l_comment: string
-}
-
-create dataset LineItem(LineItemType)
-  primary key l_orderkey, l_linenumber;
-
-load dataset LineItem 
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
-
-delete $l from dataset LineItem where $l.l_orderkey>=10 die after 1000;
-
-write output to nc1:"rttest/failure_delete.adm";      
-for $c in dataset('LineItem')
-where $c.l_orderkey>=10
-order by $c.l_orderkey, $c.l_linenumber
-return $c 
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.1.ddl.aql
deleted file mode 100644
index ea29045..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.1.ddl.aql
+++ /dev/null
@@ -1,28 +0,0 @@
-drop dataverse test if exists;
-  
-create dataverse test;
-
-use dataverse test;
-
-create type LineItemType as closed {
-  l_orderkey: int32, 
-  l_partkey: int32, 
-  l_suppkey: int32, 
-  l_linenumber: int32, 
-  l_quantity: int32, 
-  l_extendedprice: double,
-  l_discount: double, 
-  l_tax: double,
-  l_returnflag: string, 
-  l_linestatus: string, 
-  l_shipdate: string,
-  l_commitdate: string, 
-  l_receiptdate: string, 
-  l_shipinstruct: string, 
-  l_shipmode: string, 
-  l_comment: string
-}
-
-create dataset LineItem(LineItemType)
-  primary key l_orderkey, l_linenumber;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.2.update.aql
deleted file mode 100644
index ac2bd60..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.2.update.aql
+++ /dev/null
@@ -1,8 +0,0 @@
-use dataverse test;
-
-load dataset LineItem 
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
-
-delete $l from dataset LineItem where $l.l_orderkey>=10 die after 1500;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.3.query.aql
deleted file mode 100644
index 9375d30..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.3.query.aql
+++ /dev/null
@@ -1,6 +0,0 @@
-use dataverse test;
-    
-for $c in dataset('LineItem')
-where $c.l_orderkey>=10
-order by $c.l_orderkey, $c.l_linenumber
-return $c 
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.1.ddl.aql
deleted file mode 100644
index 0cb052d..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.1.ddl.aql
+++ /dev/null
@@ -1,25 +0,0 @@
-drop dataverse test if exists;
-
-create dataverse test;
-
-use dataverse test;
-
-create type MyRecord as closed {
-	id: int32,
-	tweetid: int64,
-	loc: point,
-	time: datetime,
-	text: string
-}
-
-create type MyMiniRecord as closed {
-  id: int32,
-  loc: point
-}
-
-create dataset MyData(MyRecord)
-  primary key id;
-
-create dataset MyMiniData(MyMiniRecord)
-  primary key id;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.2.update.aql
deleted file mode 100644
index bffd599..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.2.update.aql
+++ /dev/null
@@ -1,11 +0,0 @@
-use dataverse test;
-
-load dataset MyData 
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/twitter/smalltweets.txt"),("format"="adm")) pre-sorted;
-
-
-load dataset MyMiniData 
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/spatial/spatialData0.json"),("format"="adm")) pre-sorted;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.3.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.3.ddl.aql
deleted file mode 100644
index 3626933..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.3.ddl.aql
+++ /dev/null
@@ -1,5 +0,0 @@
-use dataverse test;
-
-create index rtree_index_loc_0 on MyData(loc) type rtree;
-create index rtree_index_loc on MyMiniData(loc) type rtree;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.4.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.4.update.aql
deleted file mode 100644
index 5874119..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.4.update.aql
+++ /dev/null
@@ -1,23 +0,0 @@
-use dataverse test;
-
-insert into dataset MyMiniData
-(
-	for $m in dataset('MyData')
-	where $m.id<1000
-	return {
-		"id": $m.id,
-		"loc": $m.loc
-	}
-);
-
-insert into dataset MyMiniData
-(
-	for $m in dataset('MyData')
-	where $m.id>=1000
-	die after 1000
-	return {
-		"id": $m.id,
-		"loc": $m.loc
-	}
-);
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.5.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.5.query.aql
deleted file mode 100644
index 6c75ca2..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.5.query.aql
+++ /dev/null
@@ -1,5 +0,0 @@
-use dataverse test;
-
-for $o in dataset('MyMiniData')
-order by $o.id
-return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.1.ddl.aql
deleted file mode 100644
index 7f5844e..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.1.ddl.aql
+++ /dev/null
@@ -1,37 +0,0 @@
-drop dataverse test if exists;
-  
-create dataverse test;
-
-use dataverse test;
-
-create type LineItemType as closed {
-  l_orderkey: int32, 
-  l_partkey: int32, 
-  l_suppkey: int32, 
-  l_linenumber: int32, 
-  l_quantity: double, 
-  l_extendedprice: double,
-  l_discount: double, 
-  l_tax: double,
-  l_returnflag: string, 
-  l_linestatus: string, 
-  l_shipdate: string,
-  l_commitdate: string, 
-  l_receiptdate: string, 
-  l_shipinstruct: string, 
-  l_shipmode: string, 
-  l_comment: string
-}
-
-create type LineIDType as closed {
-  l_orderkey: int32, 
-  l_linenumber: int32, 
-  l_suppkey: int32
-}
-
-create dataset LineItem(LineItemType)
-  primary key l_orderkey, l_linenumber;
-
-create dataset LineID(LineIDType)
-  primary key l_orderkey, l_linenumber;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.2.update.aql
deleted file mode 100644
index a1bfc0d..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.2.update.aql
+++ /dev/null
@@ -1,32 +0,0 @@
-use dataverse test;
-
-load dataset LineItem 
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
-
-load dataset LineID 
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/tpch0.001/lineitem_0.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
-
-insert into dataset LineID (
-for $l in dataset('LineItem')
-	where $l.l_orderkey<1000
-	return {
-		"l_orderkey": $l.l_orderkey,
-		"l_linenumber": $l.l_linenumber,
-		"l_suppkey": $l.l_partkey
-	}
-);
-
-insert into dataset LineID (
-for $l in dataset('LineItem')
-	where $l.l_orderkey>=1000
-	die after 1000
-	return {
-		"l_orderkey": $l.l_orderkey,
-		"l_linenumber": $l.l_linenumber,
-		"l_suppkey": $l.l_partkey
-	}
-);
-
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.3.query.aql
deleted file mode 100644
index 4a3e056..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.3.query.aql
+++ /dev/null
@@ -1,6 +0,0 @@
-use dataverse test;
-
-for $c in dataset('LineID')
-order by $c.l_orderkey, $c.l_linenumber
-return $c 
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree.aql
deleted file mode 100644
index a8d7d37..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree.aql
+++ /dev/null
@@ -1,8 +0,0 @@
-use dataverse test;
-
-write output to nc1:"rttest/failure_verify_delete-rtree.adm";
-
-for $o in dataset('MyData')
-where spatial-intersect($o.loc, create-rectangle(create-point(0.0,-100.0), create-point(55.5,50.0)))
-order by $o.id
-return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.1.ddl.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.1.ddl.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.2.update.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.2.update.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.3.query.aql
deleted file mode 100644
index b33019b..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.3.query.aql
+++ /dev/null
@@ -1,5 +0,0 @@
-use dataverse test;
-
-for $o in dataset('MyData')
-order by $o.id
-return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.3.query.aql
deleted file mode 100644
index 2a0f3e4..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.3.query.aql
+++ /dev/null
@@ -1,6 +0,0 @@
-use dataverse test;
-
-for $c in dataset('LineItem')
-where $c.l_orderkey>=10
-order by $c.l_orderkey, $c.l_linenumber
-return $c
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.1.ddl.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.1.ddl.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.2.update.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.2.update.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.3.query.aql
deleted file mode 100644
index 6c75ca2..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.3.query.aql
+++ /dev/null
@@ -1,5 +0,0 @@
-use dataverse test;
-
-for $o in dataset('MyMiniData')
-order by $o.id
-return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.1.ddl.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.1.ddl.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.2.update.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.2.update.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.3.query.aql
deleted file mode 100644
index 4cb52c5..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.3.query.aql
+++ /dev/null
@@ -1,5 +0,0 @@
-use dataverse test;
-
-for $c in dataset('LineID')
-order by $c.l_orderkey, $c.l_linenumber
-return $c
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/list/query-issue428/query-issue428.1.ddl.aql
similarity index 100%
copy from asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.1.ddl.aql
copy to asterix-app/src/test/resources/runtimets/queries/list/query-issue428/query-issue428.1.ddl.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/list/query-issue428/query-issue428.2.update.aql
similarity index 100%
copy from asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.2.update.aql
copy to asterix-app/src/test/resources/runtimets/queries/list/query-issue428/query-issue428.2.update.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/list/query-issue428/query-issue428.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/list/query-issue428/query-issue428.3.query.aql
new file mode 100644
index 0000000..aa6bf88
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/list/query-issue428/query-issue428.3.query.aql
@@ -0,0 +1,10 @@
+/*
+ * Description  : This test case is to verify the fix for issue428
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=428
+ * Expected Res : Success
+ * Date         : 8th May 2013
+ */
+
+for $a in [[1,2],[3,4,5]]
+for $b in [[6,7],[8,9,10]]
+return some $a1 in $a,$b1 in $b satisfies $a1 < $b1
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/misc/ifthenelse_01/ifthenelse_01.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/misc/ifthenelse_01/ifthenelse_01.1.ddl.aql
index e69de29..e6a3879 100644
--- a/asterix-app/src/test/resources/runtimets/queries/misc/ifthenelse_01/ifthenelse_01.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/misc/ifthenelse_01/ifthenelse_01.1.ddl.aql
@@ -0,0 +1,2 @@
+drop dataverse test if exists;
+create dataverse test;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue196/query-issue196.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue196/query-issue196.1.ddl.aql
new file mode 100644
index 0000000..59e814a
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue196/query-issue196.1.ddl.aql
@@ -0,0 +1,22 @@
+/*
+ * Description  : This test case is to verify the fix for issue196
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=196
+ * Expected Res : Success
+ * Date         : 5th May 2013
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type testtype1 as open {
+id : int32
+}
+
+create type testtype2 as open {
+id : int32
+}
+
+create dataset t1(testtype1) primary key id;
+create dataset t2(testtype2) primary key id;
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue196/query-issue196.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue196/query-issue196.2.update.aql
new file mode 100644
index 0000000..18756f9
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue196/query-issue196.2.update.aql
@@ -0,0 +1,20 @@
+/*
+ * Description  : This test case is to verify the fix for issue196
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=196
+ * Expected Res : Success
+ * Date         : 5th May 2013
+ */
+
+use dataverse test;
+
+insert into dataset t1({"id":24});
+insert into dataset t1({"id":23});
+insert into dataset t1({"id":21});
+insert into dataset t1({"id":44});
+insert into dataset t1({"id":64});
+
+insert into dataset t2({"id":24});
+insert into dataset t2({"id":23});
+insert into dataset t2({"id":21});
+insert into dataset t2({"id":44});
+insert into dataset t2({"id":64});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue196/query-issue196.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue196/query-issue196.3.query.aql
new file mode 100644
index 0000000..43d1980
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue196/query-issue196.3.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : This test case is to verify the fix for issue196
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=196
+ * Expected Res : Success
+ * Date         : 5th May 2013
+ */
+
+use dataverse test;
+
+let $a := (for $l in dataset('t1') order by $l.id return $l)
+let $b := (for $m in dataset('t2') order by $m.id return $m)
+return {"a":$a,"b":$b}
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343-2/query-issue343-2.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343-2/query-issue343-2.1.ddl.aql
new file mode 100644
index 0000000..a49658e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343-2/query-issue343-2.1.ddl.aql
@@ -0,0 +1,47 @@
+/*
+ * Description  : This test case is to verify the fix for issue343.  It is a more general case.
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=343
+ * Expected Res : Success
+ * Date         : 30th April 2013
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type AddressType as open {
+  number: int32,
+  street: string,
+  city: string
+}
+
+create type AllType as open {
+  id: int32,
+  name: string,
+  age: float,
+  salary: double,
+  married: boolean,
+  interests: {{string}},
+  children: [string],
+  address: AddressType,
+  dob: date,
+  time: time,
+  datetime: datetime,
+  duration: duration,
+  location2d: point,
+  location3d: point3d,
+  line: line,
+  polygon: polygon,
+  circle: circle
+}
+
+create type MyListType as open{
+	id: int32,
+	mylist: [string]
+}
+
+create dataset All(AllType)
+  primary key id;
+  
+create dataset MyList(MyListType)
+  primary key id;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343-2/query-issue343-2.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343-2/query-issue343-2.2.update.aql
new file mode 100644
index 0000000..39b85f3
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343-2/query-issue343-2.2.update.aql
@@ -0,0 +1,21 @@
+/*
+ * Description  : This test case is to verify the fix for issue343.  It is a more general case.
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=343
+ * Expected Res : Success
+ * Date         : 30th April 2013
+ */
+
+use dataverse test;
+
+insert into dataset MyList (
+ {
+   "id" : 1,
+   "mylist": ["blah"]
+ }
+);
+
+insert into dataset All (
+for $m in dataset MyList
+let $record:= { "id": 13, "name": string("Nancy"), "age": 32.5f, "salary": 12.000 ,"married": boolean("true"), "interests": {{"reading", "writing"}}, "children": ["Brad", "Scott"],  "address": {  "number": 8389,  "street": "Hill St.",  "city": "Mountain View" }, "dob": date("-2011-01-27"), "time": time("12:20:30Z"), "datetime": datetime("-1951-12-27T12:20:30"),  "duration": duration("P10Y11M12DT10H50M30S"),  "location2d": point("41.00,44.00"),  "location3d": point3d("44.00,13.00,41.00"), "line" : line("10.1,11.1 10.2,11.2"), "polygon" : polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), "circle" : circle("10.1,11.1 10.2"), "mylist" : $m.mylist }
+return $record
+);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343-2/query-issue343-2.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343-2/query-issue343-2.3.query.aql
new file mode 100644
index 0000000..e17a6e02
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343-2/query-issue343-2.3.query.aql
@@ -0,0 +1,11 @@
+/*
+ * Description  : This test case is to verify the fix for issue343.  It is a more general case.
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=343
+ * Expected Res : Success
+ * Date         : 30th April 2013
+ */
+
+use dataverse test;
+
+for $x in dataset All
+return $x
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343/query-issue343.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343/query-issue343.1.ddl.aql
new file mode 100644
index 0000000..bd2ab1a
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343/query-issue343.1.ddl.aql
@@ -0,0 +1,41 @@
+/*
+ * Description  : This test case is to verify the fix for issue343
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=343
+ * Expected Res : Success
+ * Date         : 30th April 2013
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type AddressType as open {
+  number: int32,
+  street: string,
+  city: string
+}
+
+create type AllType as open {
+  id: int32,
+  name: string,
+  age: float,
+  salary: double,
+  married: boolean,
+  interests: {{string}},
+  children: [string],
+  address: AddressType,
+  dob: date,
+  time: time,
+  datetime: datetime,
+  duration: duration,
+  location2d: point,
+  location3d: point3d,
+  line: line,
+  polygon: polygon,
+  circle: circle
+}
+
+create dataset All(AllType)
+  primary key id;
+  
+ 
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343/query-issue343.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343/query-issue343.2.update.aql
new file mode 100644
index 0000000..eaafddd
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343/query-issue343.2.update.aql
@@ -0,0 +1,14 @@
+/*
+ * Description  : This test case is to verify the fix for issue343
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=343
+ * Expected Res : Success
+ * Date         : 30th April 2013
+ */
+
+use dataverse test;
+
+insert into dataset All (
+let $addedList := ["blah"]
+let $record:= { "id": 13, "name": string("Nancy"), "age": 32.5f, "salary": 12.000 ,"married": boolean("true"), "interests": {{"reading", "writing"}}, "children": ["Brad", "Scott"],  "address": {  "number": 8389,  "street": "Hill St.",  "city": "Mountain View" }, "dob": date("-2011-01-27"), "time": time("12:20:30Z"), "datetime": datetime("-1951-12-27T12:20:30"),  "duration": duration("P10Y11M12DT10H50M30S"),  "location2d": point("41.00,44.00"),  "location3d": point3d("44.00,13.00,41.00"), "line" : line("10.1,11.1 10.2,11.2"), "polygon" : polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), "circle" : circle("10.1,11.1 10.2"), "mylist" : $addedList }
+return $record
+)
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343/query-issue343.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343/query-issue343.3.query.aql
new file mode 100644
index 0000000..9dfde51
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue343/query-issue343.3.query.aql
@@ -0,0 +1,11 @@
+/*
+ * Description  : This test case is to verify the fix for issue343
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=343
+ * Expected Res : Success
+ * Date         : 30th April 2013
+ */
+
+use dataverse test;
+
+for $x in dataset All
+return $x
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350-2/query-issue350-2.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350-2/query-issue350-2.1.ddl.aql
new file mode 100644
index 0000000..85f2160
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350-2/query-issue350-2.1.ddl.aql
@@ -0,0 +1,17 @@
+/*
+ * Description  : This test case is to verify the fix for issue350
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=350
+ * Expected Res : Success
+ * Date         : 28th April 2013
+ */
+
+drop dataverse TinySocial if exists;
+create dataverse TinySocial;
+use dataverse TinySocial;
+
+create type TweetMessageType as open {
+tweetid: string
+};
+
+create dataset TweetMessages(TweetMessageType)
+primary key tweetid; 
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350-2/query-issue350-2.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350-2/query-issue350-2.2.update.aql
new file mode 100644
index 0000000..372d7ca
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350-2/query-issue350-2.2.update.aql
@@ -0,0 +1,50 @@
+/*
+ * Description  : This test case is to verify the fix for issue350
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=350
+ * Expected Res : Success
+ * Date         : 28th April 2013
+ */
+
+use dataverse TinySocial;
+
+load dataset TweetMessages
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/twitter/tw_messages.adm"),("format"="adm"));
+
+insert into dataset TweetMessages
+(
+   {"tweetid":"13",
+    "user":
+        {"screen-name":"NathanGiesen@211",
+         "lang":"en",
+         "friends_count":39345,
+         "statuses_count":479,
+         "name":"Nathan Giesen",
+         "followers_count":49420,
+         "hobbies":["basket weaving","mud wrestling"]
+        },
+    "sender-location":point("47.44,80.65"),
+    "send-time":datetime("2008-04-26T10:10:35"),
+    "referred-topics":{{"tweeting"}},
+    "message-text":"tweety tweet, my fellow tweeters!"
+   }
+);
+
+insert into dataset TweetMessages
+(
+   {"tweetid":"15",
+    "user":
+        {"screen-name":"Jason17",
+         "lang":"en",
+         "friends_count":393,
+         "statuses_count":47,
+         "name":"Nathan Giesen",
+         "followers_count":420,
+         "hobbies":["swimming"]
+        },
+    "sender-location":point("49.44,80.65"),
+    "send-time":datetime("2009-04-26T10:10:35"),
+    "referred-topics":{{"nothing"}},
+    "message-text":"Nothing to say!"
+   }
+);
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350-2/query-issue350-2.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350-2/query-issue350-2.3.query.aql
new file mode 100644
index 0000000..298940a
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350-2/query-issue350-2.3.query.aql
@@ -0,0 +1,13 @@
+/*
+ * Description  : This test case is to verify the fix for issue350
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=350
+ * Expected Res : Success
+ * Date         : 28th April 2013
+ */
+
+use dataverse TinySocial;
+
+for $tm  in dataset TweetMessages
+where (every $h in $tm.user.hobbies satisfies $h = "basket weaving")
+order by $tm.tweetid
+return $tm;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350/query-issue350.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350/query-issue350.1.ddl.aql
new file mode 100644
index 0000000..85f2160
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350/query-issue350.1.ddl.aql
@@ -0,0 +1,17 @@
+/*
+ * Description  : This test case is to verify the fix for issue350
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=350
+ * Expected Res : Success
+ * Date         : 28th April 2013
+ */
+
+drop dataverse TinySocial if exists;
+create dataverse TinySocial;
+use dataverse TinySocial;
+
+create type TweetMessageType as open {
+tweetid: string
+};
+
+create dataset TweetMessages(TweetMessageType)
+primary key tweetid; 
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350/query-issue350.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350/query-issue350.2.update.aql
new file mode 100644
index 0000000..372d7ca
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350/query-issue350.2.update.aql
@@ -0,0 +1,50 @@
+/*
+ * Description  : This test case is to verify the fix for issue350
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=350
+ * Expected Res : Success
+ * Date         : 28th April 2013
+ */
+
+use dataverse TinySocial;
+
+load dataset TweetMessages
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/twitter/tw_messages.adm"),("format"="adm"));
+
+insert into dataset TweetMessages
+(
+   {"tweetid":"13",
+    "user":
+        {"screen-name":"NathanGiesen@211",
+         "lang":"en",
+         "friends_count":39345,
+         "statuses_count":479,
+         "name":"Nathan Giesen",
+         "followers_count":49420,
+         "hobbies":["basket weaving","mud wrestling"]
+        },
+    "sender-location":point("47.44,80.65"),
+    "send-time":datetime("2008-04-26T10:10:35"),
+    "referred-topics":{{"tweeting"}},
+    "message-text":"tweety tweet, my fellow tweeters!"
+   }
+);
+
+insert into dataset TweetMessages
+(
+   {"tweetid":"15",
+    "user":
+        {"screen-name":"Jason17",
+         "lang":"en",
+         "friends_count":393,
+         "statuses_count":47,
+         "name":"Nathan Giesen",
+         "followers_count":420,
+         "hobbies":["swimming"]
+        },
+    "sender-location":point("49.44,80.65"),
+    "send-time":datetime("2009-04-26T10:10:35"),
+    "referred-topics":{{"nothing"}},
+    "message-text":"Nothing to say!"
+   }
+);
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350/query-issue350.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350/query-issue350.3.query.aql
new file mode 100644
index 0000000..79a620b
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue350/query-issue350.3.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : This test case is to verify the fix for issue350
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=350
+ * Expected Res : Success
+ * Date         : 28th April 2013
+ */
+
+use dataverse TinySocial;
+
+for $tm  in dataset TweetMessages
+where (some $h in $tm.user.hobbies satisfies $h = "basket weaving")
+return $tm;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.1.ddl.aql
new file mode 100644
index 0000000..4722e4f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.1.ddl.aql
@@ -0,0 +1,39 @@
+/*
+ * Description  : This test case is to verify the fix for issue377
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=377
+ * Expected Res : Success
+ * Date         : 11th May 2013
+ */
+
+drop dataverse TinySocial if exists;
+create dataverse TinySocial;
+use dataverse TinySocial;
+
+create type TwitterUserType as open {
+        screen-name: string
+}
+
+create type TweetMessageType as open {
+        tweetid: string
+}
+
+create type FacebookUserType as open {
+        id: int32
+}
+
+create type FacebookMessageType as open {
+        message-id: int32
+}
+
+create dataset FacebookUsers(FacebookUserType)
+primary key id;
+
+create dataset FacebookMessages(FacebookMessageType)
+primary key message-id;
+
+create dataset TwitterUsers(TwitterUserType)
+primary key screen-name;
+
+create dataset TweetMessages(TweetMessageType)
+primary key tweetid
+hints(cardinality=100); 
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.2.update.aql
new file mode 100644
index 0000000..1d6c5ba
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.2.update.aql
@@ -0,0 +1,15 @@
+/*
+ * Description  : This test case is to verify the fix for issue377
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=377
+ * Expected Res : Success
+ * Date         : 11th May 2013
+ */
+
+use dataverse TinySocial;
+
+load dataset FacebookUsers using localfs
+(("path"="nc1://data/fbu-dml-insert-shuffled.adm"),("format"="adm"));
+
+load dataset TweetMessages using localfs
+(("path"="nc1://data/twitter/tw_messages.adm"),("format"="adm"));
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.3.query.aql
new file mode 100644
index 0000000..81d6cf6
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue377/query-issue377.3.query.aql
@@ -0,0 +1,24 @@
+/*
+ * Description  : This test case is to verify the fix for issue377
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=377
+ * Expected Res : Success
+ * Date         : 11th May 2013
+ */
+
+use dataverse TinySocial;
+
+set simfunction "edit-distance";
+set simthreshold "3";
+
+for $fbu in dataset FacebookUsers
+return {
+    "id": $fbu.id,
+    "name": $fbu.name,
+    "similar-users": for $t in dataset TweetMessages
+                        let $tu := $t.user
+                        where $tu.name ~= $fbu.name
+                        return {
+                        "twitter-screenname": $tu.screen-name,
+                        "twitter-name": $tu.name
+                        }
+};
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue410/query-issue410.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue410/query-issue410.1.ddl.aql
new file mode 100644
index 0000000..0da0aa5
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue410/query-issue410.1.ddl.aql
@@ -0,0 +1,17 @@
+/*
+ * Description  : This test case is to verify the fix for issue410
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=410
+ * Expected Res : Fail
+ * Date         : 13th May 2013
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type Emp as open {
+id:int32,
+name:string
+}
+
+create dataset Employee(Emp) primary key id;
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue410/query-issue410.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue410/query-issue410.2.update.aql
new file mode 100644
index 0000000..796c5a4
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue410/query-issue410.2.update.aql
@@ -0,0 +1,10 @@
+/*
+ * Description  : This test case is to verify the fix for issue410
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=410
+ * Expected Res : Fail
+ * Date         : 11th May 2013
+ */
+
+use dataverse test;
+
+insert into dataset Employee({"id":float("59138237473282.3293"), "name": double("0.01")});	
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue410/query-issue410.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue410/query-issue410.3.query.aql
new file mode 100644
index 0000000..d76caba
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue410/query-issue410.3.query.aql
@@ -0,0 +1,11 @@
+/*
+ * Description  : This test case is to verify the fix for issue410
+ 				: https://code.google.com/p/asterixdb/issues/detail?id=410
+ * Expected Res : Fail
+ * Date         : 11th May 2013
+ */
+
+use dataverse test;
+
+for $x in dataset('Employee')
+return $x
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/aggregate/count_null/count_null.1.adm b/asterix-app/src/test/resources/runtimets/results/aggregate/count_null/count_null.1.adm
index 51d5f4f..df462fe 100644
--- a/asterix-app/src/test/resources/runtimets/results/aggregate/count_null/count_null.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/aggregate/count_null/count_null.1.adm
@@ -1 +1 @@
-{ "count": null }
\ No newline at end of file
+{ "count": 2 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/aggregate/issue395/issue395.1.adm b/asterix-app/src/test/resources/runtimets/results/aggregate/issue395/issue395.1.adm
new file mode 100644
index 0000000..bf0d87a
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/aggregate/issue395/issue395.1.adm
@@ -0,0 +1 @@
+4
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/aggregate/issue412_0/issue412_0.1.adm b/asterix-app/src/test/resources/runtimets/results/aggregate/issue412_0/issue412_0.1.adm
new file mode 100644
index 0000000..e440e5c
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/aggregate/issue412_0/issue412_0.1.adm
@@ -0,0 +1 @@
+3
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/aggregate/issue412_1/issue412_1.1.adm b/asterix-app/src/test/resources/runtimets/results/aggregate/issue412_1/issue412_1.1.adm
new file mode 100644
index 0000000..7c1107a
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/aggregate/issue412_1/issue412_1.1.adm
@@ -0,0 +1 @@
+{ "count": 3, "average": null, "sum": null, "min": null, "max": null }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/aggregate/query-issue400/query-issue400.1.adm b/asterix-app/src/test/resources/runtimets/results/aggregate/query-issue400/query-issue400.1.adm
new file mode 100644
index 0000000..d8263ee
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/aggregate/query-issue400/query-issue400.1.adm
@@ -0,0 +1 @@
+2
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_count_null/scalar_count_null.1.adm b/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_count_null/scalar_count_null.1.adm
index 1abbc3f..4ff1111 100644
--- a/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_count_null/scalar_count_null.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/aggregate/scalar_count_null/scalar_count_null.1.adm
@@ -1,7 +1,7 @@
-null
-null
-null
-null
-null
-null
-null
\ No newline at end of file
+4
+4
+4
+4
+4
+4
+4
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/custord/join_q_04/join_q_04.1.adm b/asterix-app/src/test/resources/runtimets/results/custord/join_q_04/join_q_04.1.adm
index d4721dd..51f998a 100644
--- a/asterix-app/src/test/resources/runtimets/results/custord/join_q_04/join_q_04.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/custord/join_q_04/join_q_04.1.adm
@@ -1,3 +1,5 @@
-{ "cust_name": "Jodi Rotruck", "orderedlist": [ 1000, 66, 775 ], "unorderedlist": {{ 1000, 66, 775 }}, "ol_item1": 1000, "ol_item2": 66, "ol_item5": null, "ul_item1": 1000 }
-{ "cust_name": "Jodi Alex", "orderedlist": [ 10, 48, 5 ], "unorderedlist": {{ 10, 48, 5 }}, "ol_item1": 10, "ol_item2": 48, "ol_item5": null, "ul_item1": 10 }
-{ "cust_name": "Jodi Rotruck", "orderedlist": [ 10, 66, 775 ], "unorderedlist": {{ 10, 66, 775 }}, "ol_item1": 10, "ol_item2": 66, "ol_item5": null, "ul_item1": 10 }
+{ "cust_name": "Jodi Alex", "order_ids": [ 10 ] }
+{ "cust_name": "Jodi Rotruck", "order_ids": [ 10, 1000 ] }
+{ "cust_name": "Mary Carey", "order_ids": [  ] }
+{ "cust_name": "Mike Carey", "order_ids": [  ] }
+{ "cust_name": "Mike ley", "order_ids": [  ] }
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/query-issue382/query-issue382.1.adm b/asterix-app/src/test/resources/runtimets/results/dml/query-issue382/query-issue382.1.adm
new file mode 100644
index 0000000..9a03714
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/query-issue382/query-issue382.1.adm
@@ -0,0 +1 @@
+10
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/query-issue433/query-issue433.1.adm b/asterix-app/src/test/resources/runtimets/results/dml/query-issue433/query-issue433.1.adm
new file mode 100644
index 0000000..ce89449
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/query-issue433/query-issue433.1.adm
@@ -0,0 +1,2 @@
+{ "id": 1, "name": "u1", "sub": [ { "n": "se1", "e": 100 } ] }
+{ "id": 2, "name": "u2", "sub": [ { "n": "se2", "e": 200 } ] }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/list/query-issue428/query-issue428.1.adm b/asterix-app/src/test/resources/runtimets/results/list/query-issue428/query-issue428.1.adm
new file mode 100644
index 0000000..1140ff5
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/list/query-issue428/query-issue428.1.adm
@@ -0,0 +1,4 @@
+true
+true
+true
+true
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue196/query-issue196.1.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue196/query-issue196.1.adm
new file mode 100644
index 0000000..f93766f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue196/query-issue196.1.adm
@@ -0,0 +1 @@
+{ "a": [ { "id": 21 }, { "id": 23 }, { "id": 24 }, { "id": 44 }, { "id": 64 } ], "b": [ { "id": 21 }, { "id": 23 }, { "id": 24 }, { "id": 44 }, { "id": 64 } ] }
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue343-2/query-issue343-2.1.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue343-2/query-issue343-2.1.adm
new file mode 100644
index 0000000..5196a0f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue343-2/query-issue343-2.1.adm
@@ -0,0 +1 @@
+{ "id": 13, "name": "Nancy", "age": 32.5f, "salary": 12.0d, "married": true, "interests": {{ "reading", "writing" }}, "children": [ "Brad", "Scott" ], "address": { "number": 8389, "street": "Hill St.", "city": "Mountain View" }, "dob": date("-2011-01-27"), "time": time("12:20:30.000Z"), "datetime": datetime("-1951-12-27T12:20:30.000Z"), "duration": duration("P10Y11M12DT10H50M30S"), "location2d": point("41.0,44.0"), "location3d": point3d("44.0,13.0,41.0"), "line": line("10.1,11.1 10.2,11.2"), "polygon": polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), "circle": circle("10.1,11.1 10.2"), "mylist": [ "blah" ] }
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue343/query-issue343.1.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue343/query-issue343.1.adm
new file mode 100644
index 0000000..5196a0f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue343/query-issue343.1.adm
@@ -0,0 +1 @@
+{ "id": 13, "name": "Nancy", "age": 32.5f, "salary": 12.0d, "married": true, "interests": {{ "reading", "writing" }}, "children": [ "Brad", "Scott" ], "address": { "number": 8389, "street": "Hill St.", "city": "Mountain View" }, "dob": date("-2011-01-27"), "time": time("12:20:30.000Z"), "datetime": datetime("-1951-12-27T12:20:30.000Z"), "duration": duration("P10Y11M12DT10H50M30S"), "location2d": point("41.0,44.0"), "location3d": point3d("44.0,13.0,41.0"), "line": line("10.1,11.1 10.2,11.2"), "polygon": polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), "circle": circle("10.1,11.1 10.2"), "mylist": [ "blah" ] }
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue350-2/query-issue350-2.1.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue350-2/query-issue350-2.1.adm
new file mode 100644
index 0000000..6466feb
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue350-2/query-issue350-2.1.adm
@@ -0,0 +1,10 @@
+{ "tweetid": "1", "tweetid-copy": "1", "user": { "screen-name": "RollandEckhardstein#211", "lang": "en", "friends_count": 3657079, "statuses_count": 268, "name": "Rolland Eckhardstein", "followers_count": 3311368 }, "sender-location": point("42.13,80.43"), "send-time": datetime("2005-12-05T21:06:41.000Z"), "send-time-copy": datetime("2005-12-05T21:06:41.000Z"), "referred-topics": {{ "samsung", "plan" }}, "message-text": " love samsung the plan is amazing" }
+{ "tweetid": "10", "tweetid-copy": "10", "user": { "screen-name": "Rolldstein#211", "lang": "en", "friends_count": 3657079, "statuses_count": 268, "name": "Rolland Eckhardstful", "followers_count": 3311368 }, "sender-location": point("46.94,93.98"), "send-time": datetime("2011-04-07T14:08:46.000Z"), "send-time-copy": datetime("2011-04-07T14:08:46.000Z"), "referred-topics": {{ "t-mobile", "signal" }}, "message-text": " like t-mobile the signal is good" }
+{ "tweetid": "2", "tweetid-copy": "2", "user": { "screen-name": "RollandEckhardstein#211", "lang": "en", "friends_count": 3657079, "statuses_count": 268, "name": "David Eckhardstein", "followers_count": 3311368 }, "sender-location": point("28.86,70.44"), "send-time": datetime("2007-08-15T06:44:17.000Z"), "send-time-copy": datetime("2007-08-15T06:44:17.000Z"), "referred-topics": {{ "sprint", "voice-clarity" }}, "message-text": " like sprint its voice-clarity is mind-blowing" }
+{ "tweetid": "3", "tweetid-copy": "3", "user": { "screen-name": "RollandEckhard#500", "lang": "en", "friends_count": 3657079, "statuses_count": 268, "name": "Rolland Hetfield", "followers_count": 3311368 }, "sender-location": point("39.84,86.48"), "send-time": datetime("2008-12-24T00:07:04.000Z"), "send-time-copy": datetime("2008-12-24T00:07:04.000Z"), "referred-topics": {{ "verizon", "voice-command" }}, "message-text": " can't stand verizon its voice-command is terrible:(" }
+{ "tweetid": "4", "tweetid-copy": "4", "user": { "screen-name": "RollandEckhardstein#221", "lang": "en", "friends_count": 3657079, "statuses_count": 268, "name": "Rolland Eckhardstinz", "followers_count": 3311368 }, "sender-location": point("27.67,87.32"), "send-time": datetime("2007-02-05T16:39:13.000Z"), "send-time-copy": datetime("2007-02-05T16:39:13.000Z"), "referred-topics": {{ "t-mobile", "customer-service" }}, "message-text": " love t-mobile its customer-service is mind-blowing" }
+{ "tweetid": "5", "tweetid-copy": "5", "user": { "screen-name": "RollandEcstein#211", "lang": "en", "friends_count": 3657079, "statuses_count": 268, "name": "Rolland Eckhardst", "followers_count": 3311368 }, "sender-location": point("27.3,92.77"), "send-time": datetime("2010-09-12T06:15:28.000Z"), "send-time-copy": datetime("2010-09-12T06:15:28.000Z"), "referred-topics": {{ "t-mobile", "customization" }}, "message-text": " like t-mobile the customization is amazing:)" }
+{ "tweetid": "6", "tweetid-copy": "6", "user": { "screen-name": "Rollkhardstein#211", "lang": "en", "friends_count": 3657079, "statuses_count": 268, "name": "Kirk Hammette ", "followers_count": 3311368 }, "sender-location": point("45.62,84.78"), "send-time": datetime("2012-01-23T06:23:13.000Z"), "send-time-copy": datetime("2012-01-23T06:23:13.000Z"), "referred-topics": {{ "iphone", "network" }}, "message-text": " like iphone its network is awesome:)" }
+{ "tweetid": "7", "tweetid-copy": "7", "user": { "screen-name": "andEckhardstein#211", "lang": "en", "friends_count": 3657079, "statuses_count": 268, "name": "Rolland khardstein", "followers_count": 3311368 }, "sender-location": point("44.12,81.46"), "send-time": datetime("2012-02-17T17:30:26.000Z"), "send-time-copy": datetime("2012-02-17T17:30:26.000Z"), "referred-topics": {{ "t-mobile", "network" }}, "message-text": " hate t-mobile the network is bad" }
+{ "tweetid": "8", "tweetid-copy": "8", "user": { "screen-name": "Rolltein#211", "lang": "en", "friends_count": 3657079, "statuses_count": 268, "name": "Ron Eckhardstein", "followers_count": 3311368 }, "sender-location": point("36.86,90.71"), "send-time": datetime("2009-03-12T13:18:04.000Z"), "send-time-copy": datetime("2009-03-12T13:18:04.000Z"), "referred-topics": {{ "at&t", "touch-screen" }}, "message-text": " dislike at&t its touch-screen is OMG" }
+{ "tweetid": "9", "tweetid-copy": "9", "user": { "screen-name": "Roldstein#211", "lang": "en", "friends_count": 3657079, "statuses_count": 268, "name": "Rolland Eckdstein", "followers_count": 3311368 }, "sender-location": point("29.07,97.05"), "send-time": datetime("2012-08-15T20:19:46.000Z"), "send-time-copy": datetime("2012-08-15T20:19:46.000Z"), "referred-topics": {{ "verizon", "speed" }}, "message-text": " hate verizon its speed is bad" }
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue350/query-issue350.1.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue350/query-issue350.1.adm
new file mode 100644
index 0000000..e5ac8ab
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue350/query-issue350.1.adm
@@ -0,0 +1 @@
+{ "tweetid": "13", "user": { "screen-name": "NathanGiesen@211", "lang": "en", "friends_count": 39345, "statuses_count": 479, "name": "Nathan Giesen", "followers_count": 49420, "hobbies": [ "basket weaving", "mud wrestling" ] }, "sender-location": point("47.44,80.65"), "send-time": datetime("2008-04-26T10:10:35.000Z"), "referred-topics": {{ "tweeting" }}, "message-text": "tweety tweet, my fellow tweeters!" }
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue377/query-issue377.1.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue377/query-issue377.1.adm
new file mode 100644
index 0000000..c3bb80f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue377/query-issue377.1.adm
@@ -0,0 +1,29 @@
+{ "id": 9142198, "similar-users": [  ], "name": "SherryFea" }
+{ "id": 9313492, "similar-users": [  ], "name": "TeraWolfe" }
+{ "id": 9478720, "similar-users": [  ], "name": "AngeliaKettlewell" }
+{ "id": 10001080, "similar-users": [  ], "name": "GarrettBode" }
+{ "id": 10179538, "similar-users": [  ], "name": "OrlandoBaxter" }
+{ "id": 10307032, "similar-users": [  ], "name": "QuentinSauter" }
+{ "id": 10394488, "similar-users": [  ], "name": "OswaldRay" }
+{ "id": 10423588, "similar-users": [  ], "name": "ShirleneRuch" }
+{ "id": 10495420, "similar-users": [  ], "name": "WendyMcloskey" }
+{ "id": 11307946, "similar-users": [  ], "name": "HelgaStough" }
+{ "id": 11447332, "similar-users": [  ], "name": "SherisseMaugham" }
+{ "id": 11570326, "similar-users": [  ], "name": "LindenFilby" }
+{ "id": 11951098, "similar-users": [  ], "name": "TeraByers" }
+{ "id": 11954992, "similar-users": [  ], "name": "CaitlinLangston" }
+{ "id": 9510451, "similar-users": [  ], "name": "ChuckFinck" }
+{ "id": 9594523, "similar-users": [  ], "name": "TamWillcox" }
+{ "id": 9629395, "similar-users": [  ], "name": "JuliusWire" }
+{ "id": 9988417, "similar-users": [  ], "name": "ColineLane" }
+{ "id": 10272571, "similar-users": [  ], "name": "JarrettGoldvogel" }
+{ "id": 10361965, "similar-users": [  ], "name": "ArlenFlick" }
+{ "id": 10498285, "similar-users": [  ], "name": "KileyBridger" }
+{ "id": 10733617, "similar-users": [  ], "name": "LeonardoKight" }
+{ "id": 10874791, "similar-users": [  ], "name": "HaydeeGarratt" }
+{ "id": 10957867, "similar-users": [  ], "name": "ZachOppenheimer" }
+{ "id": 11061631, "similar-users": [  ], "name": "MaxeneKellogg" }
+{ "id": 11068231, "similar-users": [  ], "name": "DinahSwink" }
+{ "id": 11140213, "similar-users": [  ], "name": "MontgomeryWhittier" }
+{ "id": 11381089, "similar-users": [  ], "name": "EarleneAmmons" }
+{ "id": 11675221, "similar-users": [  ], "name": "CalantheGearhart" }
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue410/query-issue410.1.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue410/query-issue410.1.adm
new file mode 100644
index 0000000..ee2dfa4
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue410/query-issue410.1.adm
@@ -0,0 +1 @@
+{"id":0, "name": ""}
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/spatial/create-rtree-index/create-rtree-index.1.adm b/asterix-app/src/test/resources/runtimets/results/spatial/create-rtree-index/create-rtree-index.1.adm
index 6a2a2c6..b9c6dc7 100644
--- a/asterix-app/src/test/resources/runtimets/results/spatial/create-rtree-index/create-rtree-index.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/spatial/create-rtree-index/create-rtree-index.1.adm
@@ -1,21 +1,21 @@
-1
 2
-3
 4
-5
 6
-7
 8
-9
 10
-11
 12
-13
 14
-15
 16
-17
 18
-19
 20
-21
\ No newline at end of file
+1
+3
+5
+7
+9
+11
+13
+15
+17
+19
+21
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index faf635c..4403429 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -1,6 +1,26 @@
 <test-suite xmlns="urn:xml.testframework.asterix.ics.uci.edu" ResultOffsetPath="results" QueryOffsetPath="queries" QueryFileExtension=".aql">
   <test-group name="aggregate">
     <test-case FilePath="aggregate">
+      <compilation-unit name="query-issue400">
+        <output-dir compare="Text">query-issue400</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="issue395">
+        <output-dir compare="Text">issue395</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="issue412_0">
+        <output-dir compare="Text">issue412_0</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="issue412_1">
+        <output-dir compare="Text">issue412_1</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
       <compilation-unit name="avg_double">
         <output-dir compare="Text">avg_double</output-dir>
       </compilation-unit>
@@ -97,6 +117,9 @@
       </compilation-unit>
     </test-case>
     -->
+    <!-- TODO(madhusudancs): These tests, which exercise the local_<agg>/global_<agg> functions, should be removed, but
+    before that the code should be modified to ensure those built-in functions remain defined but are not exposed
+    by AQL; until then, these test cases are left commented out.
     <test-case FilePath="aggregate">
       <compilation-unit name="global-avg_01">
         <output-dir compare="Text">global-avg_01</output-dir>
@@ -167,6 +190,7 @@
         <output-dir compare="Text">local-avg_int8_null</output-dir>
       </compilation-unit>
     </test-case>
+    -->
     <test-case FilePath="aggregate">
       <compilation-unit name="max_empty_01">
         <output-dir compare="Text">max_empty_01</output-dir>
@@ -685,13 +709,11 @@
         <output-dir compare="Text">join_q_03</output-dir>
       </compilation-unit>
     </test-case>
-    <!--
     <test-case FilePath="custord">
       <compilation-unit name="join_q_04">
         <output-dir compare="Text">join_q_04</output-dir>
       </compilation-unit>
     </test-case>
-    -->
     <test-case FilePath="custord">
       <compilation-unit name="load-test">
         <output-dir compare="Text">load-test</output-dir>
@@ -749,6 +771,16 @@
   </test-group>
   <test-group name="dml">
      <test-case FilePath="dml">
+      <compilation-unit name="query-issue382">
+        <output-dir compare="Text">query-issue382</output-dir>
+      </compilation-unit>
+     </test-case>
+     <test-case FilePath="dml">
+      <compilation-unit name="query-issue433">
+        <output-dir compare="Text">query-issue433</output-dir>
+      </compilation-unit>
+     </test-case>
+     <test-case FilePath="dml">
       <compilation-unit name="query-issue288">
         <output-dir compare="Text">query-issue288</output-dir>
       </compilation-unit>
@@ -1039,42 +1071,6 @@
     </test-case>
   </test-group>
   <test-group name="failure">
-    <test-case FilePath="failure">
-      <compilation-unit name="delete-rtree">
-        <output-dir compare="Text">delete-rtree</output-dir>
-        <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
-      </compilation-unit>
-      <compilation-unit name="verify_delete-rtree">
-        <output-dir compare="Text">delete-rtree</output-dir>
-      </compilation-unit>
-    </test-case>
-    <test-case FilePath="failure">
-      <compilation-unit name="delete">
-        <output-dir compare="Text">delete</output-dir>
-        <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
-      </compilation-unit>
-      <compilation-unit name="verify_delete">
-        <output-dir compare="Text">delete</output-dir>
-      </compilation-unit>
-    </test-case>
-    <test-case FilePath="failure">
-      <compilation-unit name="insert-rtree">
-        <output-dir compare="Text">insert-rtree</output-dir>
-        <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
-      </compilation-unit>
-      <compilation-unit name="verify_insert-rtree">
-        <output-dir compare="Text">insert-rtree</output-dir>
-      </compilation-unit>
-    </test-case>
-    <test-case FilePath="failure">
-      <compilation-unit name="insert">
-        <output-dir compare="Text">insert</output-dir>
-        <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
-      </compilation-unit>
-      <compilation-unit name="verify_insert">
-        <output-dir compare="Text">insert</output-dir>
-      </compilation-unit>
-    </test-case>
     <!--
     <test-case FilePath="failure">
       <compilation-unit name="q1_pricing_summary_report_failure">
@@ -2131,6 +2127,11 @@
         <output-dir compare="Text">unordered-list-constructor_03</output-dir>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="list">
+      <compilation-unit name="query-issue428">
+        <output-dir compare="Text">query-issue428</output-dir>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="misc">
   <test-case FilePath="misc">
@@ -2698,6 +2699,42 @@
         <output-dir compare="Text">query-proposal</output-dir>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="open-closed">
+      <compilation-unit name="query-issue350">
+        <output-dir compare="Text">query-issue350</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="open-closed">
+      <compilation-unit name="query-issue350-2">
+        <output-dir compare="Text">query-issue350-2</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="open-closed">
+      <compilation-unit name="query-issue343">
+        <output-dir compare="Text">query-issue343</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="open-closed">
+      <compilation-unit name="query-issue343-2">
+        <output-dir compare="Text">query-issue343-2</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="open-closed">
+      <compilation-unit name="query-issue196">
+        <output-dir compare="Text">query-issue196</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="open-closed">
+      <compilation-unit name="query-issue377">
+        <output-dir compare="Text">query-issue377</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="open-closed">
+      <compilation-unit name="query-issue410">
+        <output-dir compare="Text">query-issue40</output-dir>
+        <expected-error>edu.uci.ics.asterix.common.exceptions.AsterixException</expected-error>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="quantifiers">
     <test-case FilePath="quantifiers">
@@ -4228,4 +4265,4 @@
       </compilation-unit>
     </test-case>
   </test-group>
-</test-suite>
\ No newline at end of file
+</test-suite>
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Clause.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Clause.java
index 8be2d40..3e92653 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Clause.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Clause.java
@@ -9,7 +9,6 @@
         WHERE_CLAUSE,
         ORDER_BY_CLAUSE,
         LIMIT_CLAUSE,
-        DIE_CLAUSE,
         GROUP_BY_CLAUSE,
         DISTINCT_BY_CLAUSE,
         UPDATE_CLAUSE
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateIndexStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateIndexStatement.java
index ffd0534..7a764ae 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateIndexStatement.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateIndexStatement.java
@@ -12,7 +12,6 @@
 public class CreateIndexStatement implements Statement {
 
     private Identifier indexName;
-    private boolean needToCreate = true;
     private Identifier dataverseName;
     private Identifier datasetName;
     private List<String> fieldExprs = new ArrayList<String>();
@@ -33,14 +32,6 @@
         return gramLength;
     }
 
-    public void setNeedToCreate(boolean needToCreate) {
-        this.needToCreate = needToCreate;
-    }
-
-    public boolean getNeedToCreate() {
-        return needToCreate;
-    }
-
     public Identifier getIndexName() {
         return indexName;
     }
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java
index 48b7909..b56d628 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java
@@ -1,6 +1,5 @@
 package edu.uci.ics.asterix.aql.expression;
 
-import edu.uci.ics.asterix.aql.base.Clause;
 import edu.uci.ics.asterix.aql.base.Expression;
 import edu.uci.ics.asterix.aql.base.Statement;
 import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
@@ -13,16 +12,14 @@
     private Identifier dataverseName;
     private Identifier datasetName;
     private Expression condition;
-    private Clause dieClause;
     private int varCounter;
 
     public DeleteStatement(VariableExpr vars, Identifier dataverseName, Identifier datasetName, Expression condition,
-            Clause dieClause, int varCounter) {
+            int varCounter) {
         this.vars = vars;
         this.dataverseName = dataverseName;
         this.datasetName = datasetName;
         this.condition = condition;
-        this.dieClause = dieClause;
         this.varCounter = varCounter;
     }
 
@@ -47,10 +44,6 @@
         return condition;
     }
 
-    public Clause getDieClause() {
-        return dieClause;
-    }
-
     public int getVarCounter() {
         return varCounter;
     }
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DieClause.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DieClause.java
deleted file mode 100644
index 3a77d58..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DieClause.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Clause;
-import edu.uci.ics.asterix.aql.base.Expression;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class DieClause implements Clause {
-    private Expression expr;
-
-    public DieClause() {
-    }
-
-    public DieClause(Expression dieexpr) {
-        this.expr = dieexpr;
-    }
-
-    public Expression getDieExpr() {
-        return expr;
-    }
-
-    public void setDieExpr(Expression dieexpr) {
-        this.expr = dieexpr;
-    }
-
-    @Override
-    public ClauseType getClauseType() {
-        return ClauseType.DIE_CLAUSE;
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitDieClause(this, arg);
-    }
-}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/AQLPrintVisitor.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/AQLPrintVisitor.java
index ab55e34..3506e61 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/AQLPrintVisitor.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/AQLPrintVisitor.java
@@ -7,7 +7,6 @@
 import edu.uci.ics.asterix.aql.base.Clause;
 import edu.uci.ics.asterix.aql.base.Expression;
 import edu.uci.ics.asterix.aql.base.Literal;
-import edu.uci.ics.asterix.aql.base.Statement;
 import edu.uci.ics.asterix.aql.expression.BeginFeedStatement;
 import edu.uci.ics.asterix.aql.expression.CallExpr;
 import edu.uci.ics.asterix.aql.expression.ControlFeedStatement;
@@ -18,7 +17,6 @@
 import edu.uci.ics.asterix.aql.expression.DataverseDecl;
 import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
 import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
 import edu.uci.ics.asterix.aql.expression.DistinctClause;
 import edu.uci.ics.asterix.aql.expression.DropStatement;
 import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -284,12 +282,6 @@
     }
 
     @Override
-    public void visit(DieClause lc, Integer step) throws AsterixException {
-        out.println(skip(step) + "Limit");
-        lc.getDieExpr().accept(this, step + 1);
-    }
-
-    @Override
     public void visit(FunctionDecl fd, Integer step) throws AsterixException {
         out.println(skip(step) + "FunctionDecl " + fd.getSignature().getName() + "(" + fd.getParamList().toString()
                 + ") {");
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlExpressionVisitor.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlExpressionVisitor.java
index 6c8b844..1f8e980 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlExpressionVisitor.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlExpressionVisitor.java
@@ -10,7 +10,6 @@
 import edu.uci.ics.asterix.aql.expression.DataverseDecl;
 import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
 import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
 import edu.uci.ics.asterix.aql.expression.DistinctClause;
 import edu.uci.ics.asterix.aql.expression.DropStatement;
 import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -141,7 +140,7 @@
     R visitSetStatement(SetStatement ss, T arg) throws AsterixException;
 
     R visitBeginFeedStatement(BeginFeedStatement bf, T arg) throws AsterixException;
-    
+
     R visitControlFeedStatement(ControlFeedStatement del, T arg) throws AsterixException;
 
     R visitCallExpr(CallExpr pf, T arg) throws AsterixException;
@@ -150,8 +149,6 @@
 
     R visitWriteFromQueryResultStatement(WriteFromQueryResultStatement stmtLoad, T arg) throws AsterixException;
 
-    R visitDieClause(DieClause stmtLoad, T arg) throws AsterixException;
-
     R visit(CreateFunctionStatement cfs, T arg) throws AsterixException;
 
     R visitFunctionDropStatement(FunctionDropStatement del, T arg) throws AsterixException;
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlVisitorWithVoidReturn.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlVisitorWithVoidReturn.java
index d4891ac..ff04032 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlVisitorWithVoidReturn.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlVisitorWithVoidReturn.java
@@ -10,7 +10,6 @@
 import edu.uci.ics.asterix.aql.expression.DataverseDecl;
 import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
 import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
 import edu.uci.ics.asterix.aql.expression.DistinctClause;
 import edu.uci.ics.asterix.aql.expression.DropStatement;
 import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -143,13 +142,11 @@
     void visit(DataverseDropStatement stmtDel, T arg) throws AsterixException;
 
     void visit(TypeDropStatement stmtDel, T arg) throws AsterixException;
-    
+
     void visit(BeginFeedStatement stmtDel, T arg) throws AsterixException;
 
     void visit(ControlFeedStatement stmtDel, T arg) throws AsterixException;
 
-    void visit(DieClause stmtDel, T arg) throws AsterixException;
-
     void visit(CreateFunctionStatement cfs, T arg) throws AsterixException;
 
     void visit(FunctionDropStatement fds, T arg) throws AsterixException;
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/AqlRewriter.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/AqlRewriter.java
index d295ffb..f6dec81 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/AqlRewriter.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/AqlRewriter.java
@@ -20,7 +20,6 @@
 import edu.uci.ics.asterix.aql.expression.DataverseDecl;
 import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
 import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
 import edu.uci.ics.asterix.aql.expression.DistinctClause;
 import edu.uci.ics.asterix.aql.expression.DropStatement;
 import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -350,12 +349,6 @@
         }
 
         @Override
-        public Void visitDieClause(DieClause lc, Void arg) throws AsterixException {
-            lc.getDieExpr().accept(this, arg);
-            return null;
-        }
-
-        @Override
         public Void visitListConstructor(ListConstructor lc, Void arg) throws AsterixException {
             for (Expression e : lc.getExprList()) {
                 e.accept(this, arg);
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/CloneAndSubstituteVariablesVisitor.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/CloneAndSubstituteVariablesVisitor.java
index 5742ac6..cbd15a2 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/CloneAndSubstituteVariablesVisitor.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/CloneAndSubstituteVariablesVisitor.java
@@ -17,7 +17,6 @@
 import edu.uci.ics.asterix.aql.expression.DataverseDecl;
 import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
 import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
 import edu.uci.ics.asterix.aql.expression.DistinctClause;
 import edu.uci.ics.asterix.aql.expression.DropStatement;
 import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -187,7 +186,7 @@
         } else {
             // it is a var. from the context
             var = context.getRewrittenVar(v.getVar().getId());
-            if(var == null){
+            if (var == null) {
                 var = v.getVar();
             }
         }
@@ -257,14 +256,6 @@
     }
 
     @Override
-    public Pair<IAqlExpression, List<VariableSubstitution>> visitDieClause(DieClause lc, List<VariableSubstitution> arg)
-            throws AsterixException {
-        Pair<IAqlExpression, List<VariableSubstitution>> p1 = lc.getDieExpr().accept(this, arg);
-        DieClause c = new DieClause((Expression) p1.first);
-        return new Pair<IAqlExpression, List<VariableSubstitution>>(c, arg);
-    }
-
-    @Override
     public Pair<IAqlExpression, List<VariableSubstitution>> visitListConstructor(ListConstructor lc,
             List<VariableSubstitution> arg) throws AsterixException {
         List<Expression> oldExprList = lc.getExprList();
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/InlineUdfsVisitor.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/InlineUdfsVisitor.java
index f178d88..9834f27 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/InlineUdfsVisitor.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/InlineUdfsVisitor.java
@@ -18,7 +18,6 @@
 import edu.uci.ics.asterix.aql.expression.DataverseDecl;
 import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
 import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
 import edu.uci.ics.asterix.aql.expression.DistinctClause;
 import edu.uci.ics.asterix.aql.expression.DropStatement;
 import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -104,15 +103,15 @@
     public Boolean visitRecordConstructor(RecordConstructor rc, List<FunctionDecl> arg) throws AsterixException {
         boolean changed = false;
         for (FieldBinding b : rc.getFbList()) {
-        	Pair<Boolean, Expression> leftExprInlined = inlineUdfsInExpr(b.getLeftExpr(), arg);
-        	b.setLeftExpr(leftExprInlined.second);
-        	changed = changed | leftExprInlined.first;
-        	Pair<Boolean, Expression> rightExprInlined = inlineUdfsInExpr(b.getRightExpr(), arg);
-        	b.setRightExpr(rightExprInlined.second);
-        	changed = changed | rightExprInlined.first;
-        	
-        	/*
-        	if (b.getLeftExpr().accept(this, arg)) {
+            Pair<Boolean, Expression> leftExprInlined = inlineUdfsInExpr(b.getLeftExpr(), arg);
+            b.setLeftExpr(leftExprInlined.second);
+            changed = changed | leftExprInlined.first;
+            Pair<Boolean, Expression> rightExprInlined = inlineUdfsInExpr(b.getRightExpr(), arg);
+            b.setRightExpr(rightExprInlined.second);
+            changed = changed | rightExprInlined.first;
+
+            /*
+            if (b.getLeftExpr().accept(this, arg)) {
                 changed = true;
             }
             if (b.getRightExpr().accept(this, arg)) {
@@ -251,13 +250,6 @@
     }
 
     @Override
-    public Boolean visitDieClause(DieClause lc, List<FunctionDecl> arg) throws AsterixException {
-        Pair<Boolean, Expression> p1 = inlineUdfsInExpr(lc.getDieExpr(), arg);
-        lc.setDieExpr(p1.second);
-        return p1.first;
-    }
-
-    @Override
     public Boolean visitUnaryExpr(UnaryExpr u, List<FunctionDecl> arg) throws AsterixException {
         return u.getExpr().accept(this, arg);
     }
diff --git a/asterix-aql/src/main/javacc/AQL.jj b/asterix-aql/src/main/javacc/AQL.jj
index e234bb8..0cd3a13 100644
--- a/asterix-aql/src/main/javacc/AQL.jj
+++ b/asterix-aql/src/main/javacc/AQL.jj
@@ -74,33 +74,41 @@
     
     // data generator hints
     private static final String DGEN_HINT = "dgen";
+    
+    private static class IndexParams {
+      public IndexType type;
+      public int gramLength;
+      
+      public IndexParams(IndexType type, int gramLength) {
+        this.type = type;
+        this.gramLength = gramLength;
+      }
+    };  
    
     private static String getHint(Token t) {
-       if (t.specialToken == null) {
-         return null;
-       }       
-       String s = t.specialToken.image;
-       int n = s.length();
-       if (n < 2) {
-         return null;
-       }  
-       return s.substring(1).trim();
+        if (t.specialToken == null) {
+            return null;
+        }       
+        String s = t.specialToken.image;
+        int n = s.length();
+        if (n < 2) {
+            return null;
+        }  
+        return s.substring(1).trim();
     }
 
     public AQLParser(String s){
-		this(new StringReader(s));
-		super.setInput(s);
-	}
-	
-	public static void main(String args[]) throws ParseException, TokenMgrError, IOException, FileNotFoundException, AsterixException {
-			File file = new File(args[0]);
-			Reader fis = new BufferedReader(new InputStreamReader(new FileInputStream(file), "UTF-8"));
-		    AQLParser parser = new AQLParser(fis);
-		    List<Statement> st = parser.Statement();
-		    //st.accept(new AQLPrintVisitor(), 0);
-	}
+        this(new StringReader(s));
+        super.setInput(s);
+    }
 
-
+    public static void main(String args[]) throws ParseException, TokenMgrError, IOException, FileNotFoundException, AsterixException {
+        File file = new File(args[0]);
+        Reader fis = new BufferedReader(new InputStreamReader(new FileInputStream(file), "UTF-8"));
+        AQLParser parser = new AQLParser(fis);
+        List<Statement> st = parser.Statement();
+        //st.accept(new AQLPrintVisitor(), 0);
+    }
 }
 
 PARSER_END(AQLParser)
@@ -110,236 +118,563 @@
 {
   scopeStack.push(RootScopeFactory.createRootScope(this));
   List<Statement> decls = new ArrayList<Statement>();
-  Query query=null;
+  Statement stmt = null;
 }
 {
-    (
-      (
-        ( 
-          "use"
-            {
-              decls.add(DataverseDeclaration());
-            }           
-          | "declare" "function" { 
-                              decls.add(FunctionDeclaration()); 
-             }
-	  	   |  "create" (
-	  	   	  {
-                String hint = getHint(token);
-                boolean dgen = false;
-         	   	if (hint != null && hint.startsWith(DGEN_HINT)) {
-         	   	  dgen = true;
-         	   	}                  
-              } 
-              "type"     
-              	{ 
-                              decls.add(TypeDeclaration(dgen, hint)); 
-                 }
-              | "nodegroup" 
-              	{
-                              decls.add(NodegroupDeclaration());
-                }   
-              | "external" <DATASET>
-            	{   
-              		decls.add(DatasetDeclaration(DatasetType.EXTERNAL));
-            	}
-              | "feed" <DATASET>
-                {
-                   decls.add(DatasetDeclaration(DatasetType.FEED)); 	
-            	}
-              | <DATASET>
-          		{
-            		decls.add(DatasetDeclaration(DatasetType.INTERNAL));
-          		}
-              | "index" 
-              	{
-              				decls.add(CreateIndexStatement());
-                 }
-	          | "dataverse"
-          		{
-            		decls.add(CreateDataverseStatement());
-          		}
-          	  | "function"
-          	    {
-          	        decls.add(FunctionCreation());
-          	    }	
-            )        
-         	 | "load" {
-                       decls.add(LoadStatement());
-                   }  
-          	                    
-          	| "drop"
-			(
-          	<DATASET>
-          	{
-            		decls.add(DropStatement());
-          	}
-        	| "index"
-          	{
-            		decls.add(IndexDropStatement());
-          	}
-        	| "nodegroup"
-          	{
-            		decls.add(NodeGroupDropStatement());
-          	}
-        	| "type"
-          	{
-            		decls.add(TypeDropStatement());
-          	}
-        	| "dataverse"
-          	{
-            		decls.add(DataverseDropStatement());
-          	}
-          	| "function"
-          	{
-            		decls.add(FunctionDropStatement());
-          	}
-        	)
-          | "write" {
-                       decls.add(WriteStatement());
-                    }              
-          | "set" {
-                       decls.add(SetStatement());
-                    }                                                      
-	  	  | "insert" {
-		       decls.add(InsertStatement());
-			}
-          | "delete" {
-			decls.add(DeleteStatement());
-		    }
-          | "update" {
-	  		decls.add(UpdateStatement());		
-	  	    } 
-	  	  | "begin" "feed"  
-	  	      {
-                Pair<Identifier,Identifier> nameComponents = getDotSeparatedPair();
-                decls.add(new BeginFeedStatement(nameComponents.first, nameComponents.second, getVarCounter()));
-	  	      } ";"
-	  	      
-	  	  | "suspend" "feed"  
-	  	  	 {
-                decls.add(ControlFeedDeclaration(ControlFeedStatement.OperationType.SUSPEND));
-             } ";"
-	  	   | "resume" "feed"   {
-                decls.add(ControlFeedDeclaration(ControlFeedStatement.OperationType.RESUME));
-	  	   } ";"
-	  	   | "end" "feed"   {
-	  	        decls.add(ControlFeedDeclaration(ControlFeedStatement.OperationType.END));
-	  	   } ";" 
-	  	   | "alter" "feed"  {
-      	        decls.add(AlterFeedDeclaration());
-           } ";"
-           
-           | (query = Query()) {
-               decls.add(query);
-           }
-           )*
-          //  (query = Query())?
-      )
-
-      <EOF>
-    )
+  ( stmt = SingleStatement() (";") ?
     {
-     return decls;  
+      decls.add(stmt);
+    }
+  )*
+  <EOF>
+  {
+    return decls;  
+  }
+}
+
+Statement SingleStatement() throws ParseException:
+{
+  Statement stmt = null;
+}
+{
+  (
+    stmt = DataverseDeclaration()
+    | stmt = FunctionDeclaration()
+    | stmt = CreateStatement()
+    | stmt = LoadStatement()
+    | stmt = DropStatement()
+    | stmt = WriteStatement()
+    | stmt = SetStatement()
+    | stmt = InsertStatement()
+    | stmt = DeleteStatement()
+    | stmt = UpdateStatement()       
+    | stmt = FeedStatement()
+    | stmt = Query()
+  )
+  {
+    return stmt;
+  }
+}
+
+DataverseDecl DataverseDeclaration() throws ParseException:
+{
+  String dvName = null;
+}
+{
+  "use" "dataverse" dvName = Identifier()
+    {
+      defaultDataverse = dvName;
+      return new DataverseDecl(new Identifier(dvName));
+    }
+}
+
+Statement CreateStatement() throws ParseException:
+{
+  String hint = null;
+  boolean dgen = false;
+  Statement stmt = null;
+}
+{
+  "create"
+  (
+    {
+      hint = getHint(token);
+      if (hint != null && hint.startsWith(DGEN_HINT)) {
+        dgen = true;
+      }                  
+    }
+    stmt = TypeSpecification(hint, dgen)
+    | stmt = NodegroupSpecification()
+    | stmt = DatasetSpecification()    
+    | stmt = IndexSpecification()
+    | stmt = DataverseSpecification()
+    | stmt = FunctionSpecification()
+  )        
+  {
+    return stmt;
+  }
+}
+
+TypeDecl TypeSpecification(String hint, boolean dgen) throws ParseException:
+{
+  Pair<Identifier,Identifier> nameComponents = null;
+  boolean ifNotExists = false;
+  TypeExpression typeExpr = null;
+}
+{
+  "type" nameComponents = FunctionOrTypeName() ifNotExists = IfNotExists()
+  "as" typeExpr = TypeExpr()
+    {
+      long numValues = -1;
+      String filename = null;
+      if (dgen) {       
+        String splits[] = hint.split(" +");
+        if (splits.length != 3) {
+          throw new ParseException("Expecting /*+ dgen <filename> <numberOfItems> */");
+        } 
+        filename = splits[1];
+        numValues = Long.parseLong(splits[2]);
+      }  
+      TypeDataGen tddg = new TypeDataGen(dgen, filename, numValues);
+      return new TypeDecl(nameComponents.first, nameComponents.second, typeExpr, tddg, ifNotExists);
+    }
+}
+
+
+NodegroupDecl NodegroupSpecification() throws ParseException:
+{
+  String name = null;
+  String tmp = null;
+  boolean ifNotExists = false;
+  List<Identifier>ncNames = null;
+}
+{
+  "nodegroup" name = Identifier()
+  ifNotExists = IfNotExists() "on" tmp = Identifier()
+    {
+      ncNames = new ArrayList<Identifier>();
+      ncNames.add(new Identifier(tmp));
+    }
+  ( "," tmp = Identifier()
+    {
+      ncNames.add(new Identifier(tmp));
+    }
+  )*
+    {
+      return new NodegroupDecl(new Identifier(name), ncNames, ifNotExists);
+    }
+}
+
+DatasetDecl DatasetSpecification() throws ParseException:
+{
+  Pair<Identifier,Identifier> nameComponents = null;  
+  boolean ifNotExists = false;
+  String typeName = null;
+  String adapterName = null;
+  Map<String,String> properties = null;
+  FunctionSignature appliedFunction = null;
+  List<String> primaryKeyFields = null;
+  String nodeGroupName = null;
+  Map<String,String> hints = new HashMap<String,String>();  
+  DatasetDecl dsetDecl = null;
+}
+{
+  (
+    "external" <DATASET> nameComponents = QualifiedName()
+    <LEFTPAREN> typeName = Identifier() <RIGHTPAREN>
+    ifNotExists = IfNotExists()
+    "using" adapterName = AdapterName() properties = Configuration()
+    ( "hints" hints = Properties() )?
+      {
+        ExternalDetailsDecl edd = new ExternalDetailsDecl();
+        edd.setAdapter(adapterName);
+        edd.setProperties(properties);
+        dsetDecl = new DatasetDecl(nameComponents.first,
+                                   nameComponents.second,
+                                   new Identifier(typeName),
+                                   hints,
+                                   DatasetType.EXTERNAL,
+                                   edd,
+                                   ifNotExists);
+      } 
+
+    | "feed" <DATASET> nameComponents = QualifiedName()
+    <LEFTPAREN> typeName = Identifier() <RIGHTPAREN>
+    ifNotExists = IfNotExists()
+    "using" adapterName = AdapterName() properties = Configuration()
+    (appliedFunction = ApplyFunction())? primaryKeyFields = PrimaryKey()
+    ( "on" nodeGroupName = Identifier() )?
+    ( "hints" hints = Properties() )?
+      {
+        FeedDetailsDecl fdd = new FeedDetailsDecl(adapterName,
+                                                  properties,
+                                                  appliedFunction,
+                                                  nodeGroupName != null
+                                                    ? new Identifier(nodeGroupName)
+                                                    : null,
+                                                  primaryKeyFields);
+        dsetDecl = new DatasetDecl(nameComponents.first,
+                                   nameComponents.second,
+                                   new Identifier(typeName),
+                                   hints,
+                                   DatasetType.FEED,
+                                   fdd,
+                                   ifNotExists);
+      }
+    | <DATASET> nameComponents = QualifiedName()
+    <LEFTPAREN> typeName = Identifier() <RIGHTPAREN>
+    ifNotExists = IfNotExists()
+    primaryKeyFields = PrimaryKey() ("on" nodeGroupName = Identifier() )?
+    ( "hints" hints = Properties() )?
+      {
+        InternalDetailsDecl idd = new InternalDetailsDecl(nodeGroupName != null
+                                                            ? new Identifier(nodeGroupName)
+                                                            : null,
+                                                          primaryKeyFields);
+        dsetDecl = new DatasetDecl(nameComponents.first,
+                                   nameComponents.second,
+                                   new Identifier(typeName),
+                                   hints,
+                                   DatasetType.INTERNAL,
+                                   idd,
+                                   ifNotExists);
+      }
+  )
+    {
+      return dsetDecl;
+    }
+}
+
+CreateIndexStatement IndexSpecification() throws ParseException:
+{
+  CreateIndexStatement cis = new CreateIndexStatement();
+  String indexName = null;
+  String fieldExpr = null;
+  boolean ifNotExists = false;
+  Pair<Identifier,Identifier> nameComponents = null;
+  IndexParams indexType = null;
+}
+{
+  "index" indexName = Identifier()
+  ifNotExists = IfNotExists()
+  "on" nameComponents = QualifiedName()    
+  <LEFTPAREN> ( fieldExpr = Identifier()
+    {
+      cis.addFieldExpr(fieldExpr);
+    }
+  ) ("," fieldExpr = Identifier()
+    {
+      cis.addFieldExpr(fieldExpr);
+    }
+  )* <RIGHTPAREN> ( "type" indexType = IndexType() )?
+    {
+      cis.setIndexName(new Identifier(indexName));
+      cis.setIfNotExists(ifNotExists);
+      cis.setDataverseName(nameComponents.first);
+      cis.setDatasetName(nameComponents.second);
+      if (indexType != null) {
+        cis.setIndexType(indexType.type);
+        cis.setGramLength(indexType.gramLength);
+      }
+      return cis;
+    }
+}
+
+IndexParams IndexType() throws ParseException:
+{
+  IndexType type = null;
+  int gramLength = 0;
+}
+{
+  ("btree"
+    {
+      type = IndexType.BTREE;
+    }         
+  | "rtree"
+    {
+      type = IndexType.RTREE;
+    }
+  | "keyword"
+    {
+      type = IndexType.WORD_INVIX;
+    }
+  | "fuzzy keyword"
+    {
+      type = IndexType.FUZZY_WORD_INVIX;
+    }
+  | "ngram" <LEFTPAREN> <INTEGER_LITERAL>
+    {
+      type = IndexType.NGRAM_INVIX;
+      gramLength = Integer.valueOf(token.image);
+    }
+  <RIGHTPAREN>
+  | "fuzzy ngram" <LEFTPAREN> <INTEGER_LITERAL>
+    {
+      type = IndexType.FUZZY_NGRAM_INVIX;
+      gramLength = Integer.valueOf(token.image);
+    }
+  <RIGHTPAREN>)
+    {
+      return new IndexParams(type, gramLength);
+    }
+}
+
+CreateDataverseStatement DataverseSpecification() throws ParseException :
+{
+  String dvName = null;
+  boolean ifNotExists = false;
+  String format = null;
+}
+{
+  "dataverse" dvName = Identifier()
+  ifNotExists = IfNotExists()
+  ( "with format" format = StringLiteral() )?
+    {
+      return new CreateDataverseStatement(new Identifier(dvName), format, ifNotExists);
+    }
+}
+
+CreateFunctionStatement FunctionSpecification() throws ParseException:
+{
+  FunctionSignature signature;
+  boolean ifNotExists = false;
+  List<VarIdentifier> paramList = new ArrayList<VarIdentifier>();
+  String functionBody;
+  VarIdentifier var = null;
+  Expression functionBodyExpr;
+  Token beginPos;
+  Token endPos;
+  Pair<Identifier,Identifier> nameComponents=null;
+
+  createNewScope();
+}
+{
+  "function" nameComponents = FunctionOrTypeName()
+  ifNotExists = IfNotExists()
+  <LEFTPAREN> (<VARIABLE>
+    {
+      var = new VarIdentifier();
+      var.setValue(token.image);
+      paramList.add(var);
+      getCurrentScope().addNewVarSymbolToScope(var);
+    }
+  ("," <VARIABLE>
+    {
+      var = new VarIdentifier();
+      var.setValue(token.image);
+      paramList.add(var);
+      getCurrentScope().addNewVarSymbolToScope(var);
+    }
+  )*)? <RIGHTPAREN> "{"
+    {
+      beginPos = token;
+    } 
+  functionBodyExpr = Expression() "}"
+    {
+      endPos = token;
+      functionBody = extractFragment(beginPos.beginLine, beginPos.beginColumn, endPos.beginLine, endPos.beginColumn);
+      String dataverse = nameComponents.first.getValue();
+      String functionName = nameComponents.second.getValue();      
+      signature = new FunctionSignature(dataverse, functionName, paramList.size());
+      getCurrentScope().addFunctionDescriptor(signature, false);
+      return new CreateFunctionStatement(signature, paramList, functionBody, ifNotExists);
+    }
+}
+
+boolean IfNotExists() throws ParseException:
+{
+}
+{
+  ( "if not exists"
+    {
+      return true;
+    }
+  )?
+    {
+      return false;
+    }
+}
+
+FunctionSignature ApplyFunction() throws ParseException:
+{
+  FunctionSignature funcSig = null;
+}
+{
+  "apply" "function" funcSig = FunctionSignature()
+    {
+      return funcSig;
+    }
+}
+
+FunctionSignature FunctionSignature() throws ParseException:
+{
+  Pair<Identifier,Identifier> pairId = null;
+  int arity = 0;
+}
+{
+  pairId = FunctionOrTypeName() "@" <INTEGER_LITERAL> 
+    {  
+      arity = new Integer(token.image);
+      if (arity < 0 && arity != FunctionIdentifier.VARARGS) {
+        throw new ParseException(" invalid arity:" + arity);
+      }
+
+      String dataverse = pairId.first.getValue();
+      String functionName = pairId.second.getValue(); 
+      return new FunctionSignature(dataverse, functionName, arity);
+    }
+}
+
+List<String> PrimaryKey() throws ParseException:
+{
+  String tmp = null;
+  List<String> primaryKeyFields = new ArrayList<String>();
+}
+{
+  "primary" "key" tmp = Identifier()
+    {
+      primaryKeyFields.add(tmp);
+    }
+  ( "," tmp = Identifier()
+    {
+      primaryKeyFields.add(tmp);
+    }
+  )*
+    {
+      return primaryKeyFields;
+    }
+}
+
+Statement DropStatement() throws ParseException:
+{
+  String id = null;
+  Pair<Identifier,Identifier> pairId = null;
+  Triple<Identifier,Identifier,Identifier> tripleId = null;
+  FunctionSignature funcSig = null;
+  boolean ifExists = false;
+  Statement stmt = null;
+}
+{
+  "drop"
+  (
+    <DATASET> pairId = QualifiedName() ifExists = IfExists()
+      {
+        stmt = new DropStatement(pairId.first, pairId.second, ifExists);
+      }
+    | "index" tripleId = DoubleQualifiedName() ifExists = IfExists()
+      {
+        stmt = new IndexDropStatement(tripleId.first, tripleId.second, tripleId.third, ifExists);
+      }
+    | "nodegroup" id = Identifier() ifExists = IfExists()
+      {
+        stmt = new NodeGroupDropStatement(new Identifier(id), ifExists);
+      }
+    | "type" pairId = FunctionOrTypeName() ifExists = IfExists()
+      {
+        stmt = new TypeDropStatement(pairId.first, pairId.second, ifExists);
+      }
+    | "dataverse" id = Identifier() ifExists = IfExists()
+      {
+        stmt = new DataverseDropStatement(new Identifier(id), ifExists);
+      }
+    | "function" funcSig = FunctionSignature() ifExists = IfExists()
+      {
+        stmt = new FunctionDropStatement(funcSig, ifExists);
+      }
+  )
+  {
+    return stmt;  
+  }
+}
+
+boolean IfExists() throws ParseException :
+{
+}
+{
+  ( "if" "exists"
+    {
+      return true;
+    }
+  )?
+    {
+      return false;
     }
 }
 
 InsertStatement InsertStatement() throws ParseException:
 {
-	Identifier dataverseName;
-	Identifier datasetName;
-	Pair<Identifier,Identifier> nameComponents = null;
-	Query query;
+  Pair<Identifier,Identifier> nameComponents = null;
+  Query query;
 }
 {
-   "into" <DATASET>
-   
-   {
-    nameComponents = getDotSeparatedPair();
-    dataverseName = nameComponents.first;
-    datasetName = nameComponents.second;
-   }
-    
-    query = Query() (";")?
-   {return new InsertStatement(dataverseName, datasetName, query,  getVarCounter());}
+  "insert" "into" <DATASET> nameComponents = QualifiedName() query = Query()
+    {
+      return new InsertStatement(nameComponents.first, nameComponents.second, query, getVarCounter());
+    }
 }
 
 DeleteStatement DeleteStatement() throws ParseException:
 {
-	VariableExpr var = null;
-    Identifier dataverseName;
-    Identifier datasetName = null;
-	Expression condition = null;
-	Clause dieClause = null;
-	Pair<Identifier, Identifier> nameComponents;
+  VariableExpr var = null;
+  Expression condition = null;
+  Pair<Identifier, Identifier> nameComponents;
 }
 {
-   var = Variable() { getCurrentScope().addNewVarSymbolToScope(var.getVar());  }
-   "from" 
-   <DATASET> 
-   { 
-	  nameComponents  = getDotSeparatedPair();
-   }
-   ("where" condition = Expression())?  (dieClause = DieClause())? (";")?
-   {return new DeleteStatement(var, nameComponents.first, nameComponents.second, condition,  dieClause, getVarCounter()); }
+  "delete" var = Variable()
+    {
+      getCurrentScope().addNewVarSymbolToScope(var.getVar());
+    }
+  "from" <DATASET> nameComponents  = QualifiedName() 
+  ("where" condition = Expression())?
+    {
+      return new DeleteStatement(var, nameComponents.first, nameComponents.second, condition, getVarCounter());
+    }
 }
 
 UpdateStatement UpdateStatement() throws ParseException:
 {
-	VariableExpr vars;
-    Expression target;
-	Expression condition;
-	UpdateClause uc;
- 	List<UpdateClause> ucs = new ArrayList<UpdateClause>();
+  VariableExpr vars;
+  Expression target;
+  Expression condition;
+  UpdateClause uc;
+  List<UpdateClause> ucs = new ArrayList<UpdateClause>();
 }
 {
-   vars = Variable()  "in" target = Expression()
-	"where" condition = Expression() 
-	<LEFTPAREN> (uc=UpdateClause() {ucs.add(uc); }  ("," uc=UpdateClause() {ucs.add(uc); } )*) <RIGHTPAREN> ";"
-   {return new UpdateStatement(vars, target, condition, ucs);}
+  "update" vars = Variable() "in" target = Expression()
+  "where" condition = Expression() 
+  <LEFTPAREN> (uc = UpdateClause()
+    {
+      ucs.add(uc);
+    }
+  ("," uc = UpdateClause()
+    {
+      ucs.add(uc);
+    }
+  )*) <RIGHTPAREN>
+    {
+      return new UpdateStatement(vars, target, condition, ucs);
+    }
 }
 
-
-
 UpdateClause UpdateClause() throws ParseException:
 {
-	Expression target = null;
-	Expression value = null ;	
-	InsertStatement is = null;
-	DeleteStatement ds = null;
-	UpdateStatement us = null;
-	Expression condition = null;
-	UpdateClause ifbranch = null;
-	UpdateClause elsebranch = null;
+  Expression target = null;
+  Expression value = null;
+  InsertStatement is = null;
+  DeleteStatement ds = null;
+  UpdateStatement us = null;
+  Expression condition = null;
+  UpdateClause ifbranch = null;
+  UpdateClause elsebranch = null;
 }
 {
    "set" target = Expression() ":=" value = Expression() 
-   | "insert" is = InsertStatement()
-   | "delete" ds = DeleteStatement()
-   | "update" us = UpdateStatement()
-   | "if" <LEFTPAREN> condition = Expression() <RIGHTPAREN> "then" ifbranch = UpdateClause() [LOOKAHEAD(1) "else" elsebranch = UpdateClause()] 
-   {return new UpdateClause(target, value, is, ds, us, condition, ifbranch, elsebranch);}
+   | is = InsertStatement()
+   | ds = DeleteStatement()
+   | us = UpdateStatement()
+   | "if" <LEFTPAREN> condition = Expression() <RIGHTPAREN>
+     "then" ifbranch = UpdateClause()
+     [LOOKAHEAD(1) "else" elsebranch = UpdateClause()] 
+     {
+       return new UpdateClause(target, value, is, ds, us, condition, ifbranch, elsebranch);
+     }
 }
 
-
 Statement SetStatement() throws ParseException:
 {
   String pn = null;
-  Statement stmt = null;
+  String pv = null;
 }
 {
-  <IDENTIFIER>  { pn = token.image; }  
-  <STRING_LITERAL>
-    { String pv = removeQuotesAndEscapes(token.image); }
-    ";"
-  {
-    return new SetStatement(pn, pv);
-  }
+  "set" pn = Identifier() pv = StringLiteral()
+    {
+      return new SetStatement(pn, pv);
+    }
 }
 
 Statement WriteStatement() throws ParseException:
 {
-  Identifier nodeName = null;
+  String nodeName = null;
   String fileName = null;
   Statement stmt = null;
   Query query;
@@ -347,288 +682,27 @@
   Pair<Identifier,Identifier> nameComponents = null;
 }
 {
-  (( "output" "to" 
-    <IDENTIFIER> { nodeName = new Identifier(token.image); } 
-    ":" <STRING_LITERAL> { fileName = removeQuotesAndEscapes(token.image); }
-    ( "using" <STRING_LITERAL> { writerClass = removeQuotesAndEscapes(token.image); } )?
-         {                  
-             stmt = new WriteStatement(nodeName, fileName, writerClass);         
-         } )
-    |
-   ( "into" 
-     <DATASET> 
-     
-      {
-       nameComponents = getDotSeparatedPair();
+  "write" ((
+    "output" "to" nodeName = Identifier() ":" fileName = StringLiteral()
+    ( "using" writerClass = StringLiteral() )?
+      {                  
+        stmt = new WriteStatement(new Identifier(nodeName), fileName, writerClass);         
       }
-     
-     <LEFTPAREN> query = Query() <RIGHTPAREN>
-     {
+  ) | (
+    "into" <DATASET> 
+      {
+        nameComponents = QualifiedName();
+      }
+    <LEFTPAREN> query = Query() <RIGHTPAREN>
+      {
         stmt = new WriteFromQueryResultStatement(nameComponents.first, nameComponents.second, query, getVarCounter());
-     } ))  
-        
-    ";"
+      }
+  ))
     {
       return stmt;
     }
 }
 
-CreateIndexStatement CreateIndexStatement() throws ParseException:
-{
-  CreateIndexStatement cis = new CreateIndexStatement();
-  Pair<Identifier,Identifier> nameComponents = null;
-}
-{
-  <IDENTIFIER> { cis.setIndexName(new Identifier(token.image)); }
-  (
-    "if not exists"
-    {
-      cis.setIfNotExists(true);
-    }
-  )?
-  "on"  
-  
-   {
-   nameComponents = getDotSeparatedPair();
-   cis.setDataverseName(nameComponents.first);
-   cis.setDatasetName(nameComponents.second);
-   }
-  
-  <LEFTPAREN>
-  	( <IDENTIFIER> { cis.addFieldExpr(token.image); } )
-  	("," <IDENTIFIER> { cis.addFieldExpr(token.image); })*
-  <RIGHTPAREN>
-    ("type"
-  		("btree" { cis.setIndexType(IndexType.BTREE); }  		
-  		| "rtree" { cis.setIndexType(IndexType.RTREE); }
-  		| "keyword" { cis.setIndexType(IndexType.WORD_INVIX); }
-  		| "fuzzy keyword" { cis.setIndexType(IndexType.FUZZY_WORD_INVIX); }
-  		| "ngram"
-  		  <LEFTPAREN>
-  		  (<INTEGER_LITERAL>
-  		    {
-  		      cis.setIndexType(IndexType.NGRAM_INVIX);
-  		      cis.setGramLength(Integer.valueOf(token.image));
-  		    }
-  		  )
-  		  <RIGHTPAREN>
-  		| "fuzzy ngram"
-  		  <LEFTPAREN>
-  		  (<INTEGER_LITERAL>
-  		    {
-  		      cis.setIndexType(IndexType.FUZZY_NGRAM_INVIX);
-  		      cis.setGramLength(Integer.valueOf(token.image));
-  		    }
-  		  )
-  		  <RIGHTPAREN>
-		)
-  	";"  	
-  	| ";"
-    )
-   {
-     return cis;
-   }
-}
-
-DataverseDecl DataverseDeclaration() throws ParseException:
-{
-  Identifier dvName = null;  
-}
-{
-  "dataverse" <IDENTIFIER> { defaultDataverse = token.image;}
-  ";"
-  {
-    return new DataverseDecl(new Identifier(defaultDataverse));
-  }
-}
-
-DropStatement DropStatement() throws ParseException :
-{
-  Identifier dataverseName = null;
-  Identifier datasetName = null;
-  boolean ifExists = false;
-  Pair<Identifier,Identifier> nameComponents=null;
-}
-{
-   {
-   nameComponents = getDotSeparatedPair();
-   dataverseName = nameComponents.first;
-   datasetName = nameComponents.second;
-   }
-   
-   
-  (
-    "if exists"
-    {
-      ifExists = true;
-    }
-  )? ";"
-  {
-    return new DropStatement(dataverseName, datasetName, ifExists);
-  }
-}
-
-IndexDropStatement IndexDropStatement() throws ParseException :
-{
-  Identifier dataverseName = null;
-  Identifier datasetName = null;
-  Identifier indexName = null;
-  boolean ifExists = false;
-  Triple<Identifier,Identifier,Identifier> nameComponents=null;
-}
-{
-  
-  {
-   nameComponents = getDotSeparatedTriple();
-   dataverseName = nameComponents.first;
-   datasetName = nameComponents.second;
-   indexName = nameComponents.third;
-   }
-  
-  (
-    "if exists"
-    {
-      ifExists = true;
-    }
-  )? ";"
-  {
-    return new IndexDropStatement(dataverseName, datasetName, indexName, ifExists);
-  }
-}
-
-NodeGroupDropStatement NodeGroupDropStatement() throws ParseException :
-{
-  Identifier groupName = null;
-  boolean ifExists = false;
-}
-{
-  < IDENTIFIER >
-  {
-    groupName = new Identifier(token.image);
-  }
-  (
-    "if exists"
-    {
-      ifExists = true;
-    }
-  )? ";"
-  {
-    return new NodeGroupDropStatement(groupName, ifExists);
-  }
-}
-
-TypeDropStatement TypeDropStatement() throws ParseException :
-{
-  Identifier dataverseName = null;
-  Identifier typeName = null;
-  boolean ifExists = false;
-  Pair<Identifier,Identifier> nameComponents;
-}
-{
-  {
-    nameComponents = getDotSeparatedPair();
-    dataverseName = nameComponents.first == null ? new Identifier(defaultDataverse) : nameComponents.first;
-    typeName = nameComponents.second;
-  }
-  (
-    "if exists"
-    {
-      ifExists = true;
-    }
-  )? ";"
-  {
-    return new TypeDropStatement(dataverseName, typeName, ifExists);
-  }
-}
-
-DataverseDropStatement DataverseDropStatement() throws ParseException :
-{
-  Identifier dataverseName = null;
-  boolean ifExists = false;
-}
-{
-  < IDENTIFIER >
-  {
-    dataverseName = new Identifier(token.image);
-  }
-  (
-    "if exists"
-    {
-      ifExists = true;
-    }
-  )? ";"
-  {
-    return new DataverseDropStatement(dataverseName, ifExists);
-  }
-}
-
-CreateDataverseStatement CreateDataverseStatement() throws ParseException :
-{
-  Identifier dvName = null;
-  boolean ifNotExists = false;
-  String format = null;
-}
-{
-  < IDENTIFIER >
-  {
-    dvName = new Identifier(token.image);
-  }
-  (
-    "if not exists"
-    {
-      ifNotExists = true;
-    }
-  )?
-  (
-    "with format" < STRING_LITERAL >
-    {
-      format = removeQuotesAndEscapes(token.image);
-    }
-  )?
-  ";"
-  {
-    return new CreateDataverseStatement(dvName, format, ifNotExists);
-  }
-}
-
-
-FunctionDropStatement FunctionDropStatement() throws ParseException :
-{
-  String dataverse;
-  String functionName;
-  int arity=0;
-  boolean ifExists = false;
-  Pair<Identifier, Identifier> nameComponents=null;
-}
-{
-  {
-     nameComponents = getDotSeparatedPair();
-     dataverse = nameComponents.first != null ? nameComponents.first.getValue() : defaultDataverse;
-     functionName = nameComponents.second.getValue(); 
-  }
-  
-   "@"
-  <INTEGER_LITERAL> 
-  {  
-     Token t= getToken(0);
-	 arity = new Integer(t.image);
-  	 if( arity < 0 && arity != FunctionIdentifier.VARARGS){
-  	 	throw new ParseException(" invalid arity:" + arity);
-  	 } 
-  }
-  
-  (
-    "if exists"
-    {
-      ifExists = true;
-    }
-  )? ";"
-  {
-    return new FunctionDropStatement(new FunctionSignature(dataverse, functionName, arity), ifExists);
-  }
-}
-
-
 LoadFromFileStatement LoadStatement() throws ParseException:
 {
   Identifier dataverseName = null;
@@ -639,492 +713,138 @@
   Pair<Identifier,Identifier> nameComponents = null;
 }
 {
-   <DATASET> 
-   {
-   nameComponents = getDotSeparatedPair();
-   dataverseName = nameComponents.first;
-   datasetName = nameComponents.second;
-   }
-   
-   "using"
-  
+  "load" <DATASET> nameComponents = QualifiedName()
     {
-    	adapterName = getAdapterName();
+      dataverseName = nameComponents.first;
+      datasetName = nameComponents.second;
     }
-   
+  "using" adapterName = AdapterName() properties = Configuration()
+  ("pre-sorted" 
     {
-      properties = getConfiguration();
-    }
-  
-    ("pre-sorted" 
-      {  alreadySorted = true; }
-    )?
-        
-  ";"
-  {
-     return new LoadFromFileStatement(dataverseName, datasetName, adapterName, properties, alreadySorted);
-  }   
-}
-
-
-String getAdapterName() throws ParseException :
-{
-	String adapterName = null;
-}
-{
-    ( 
-      <IDENTIFIER> {
-     	adapterName = (new Identifier(token.image)).getValue();; 
-      }
-      | 
-      <STRING_LITERAL>
-      {
-        adapterName = removeQuotesAndEscapes(token.image);
-      }
-    )
-    {
-	return adapterName;
-	}
-}
-
-
-DatasetDecl DatasetDeclaration(DatasetType datasetType) throws ParseException :
-{
-  DatasetDecl dd = null;
-  Identifier datasetName = null;
-  Identifier dataverseName = null;
-  Identifier itemDataverseName = null;
-  Identifier itemTypeName = null;
-  String nameComponentFirst = null;
-  String nameComponentSecond = null;
-  boolean ifNotExists = false;
-  IDatasetDetailsDecl datasetDetails = null;
-  Pair<Identifier,Identifier> nameComponents = null;
-  Map<String,String> hints = new HashMap<String,String>();	
-}
-{
-  {
-   nameComponents = getDotSeparatedPair();
-   dataverseName = nameComponents.first;
-   datasetName = nameComponents.second;
-   }
-    
-  (
-    "if not exists"
-    {
-      ifNotExists = true;
+      alreadySorted = true;
     }
   )?
-  (
-  	< LEFTPAREN > <IDENTIFIER>
-  	{
-    	itemTypeName = new Identifier(token.image);
-  	}
-  	< RIGHTPAREN >
-  )
-  {
-  	  if(datasetType == DatasetType.INTERNAL) {
-      	datasetDetails = InternalDatasetDeclaration();
-      }
-      else if(datasetType == DatasetType.EXTERNAL) {
-      	datasetDetails = ExternalDatasetDeclaration();
-      }
-      else if(datasetType == DatasetType.FEED) {
-      	datasetDetails = FeedDatasetDeclaration();
-      }
-  }
-  
-  (
-  "hints"
-  {
-      initProperties(hints);
-  }
-  )?
-   ";"
- 
-  {
-   dd = new DatasetDecl(dataverseName, datasetName, itemTypeName, hints, datasetType, datasetDetails,ifNotExists);
-   return dd;
-  }
+    {
+      return new LoadFromFileStatement(dataverseName, datasetName, adapterName, properties, alreadySorted);
+    }   
 }
 
-InternalDetailsDecl InternalDatasetDeclaration() throws ParseException :
-{
-    InternalDetailsDecl idd = null;
-    List<String> primaryKeyFields = new ArrayList<String>();
-    Identifier nodeGroupName=null;
-}
-{
-  (
-    {
-  	  primaryKeyFields = getPrimaryKeyFields();
-    }
-  )
-  
-  (
-  "on" < IDENTIFIER >
-    {
-        nodeGroupName = new Identifier(token.image);
-    }
-  )?
-  
-  {
-    idd = new InternalDetailsDecl(nodeGroupName, primaryKeyFields);
-    return idd;
-  }
-}
 
-ExternalDetailsDecl ExternalDatasetDeclaration() throws ParseException :
+String AdapterName() throws ParseException :
 {
-  ExternalDetailsDecl edd = null;
   String adapterName = null;
-  Map < String, String > properties;
 }
 {
-  {
-    edd = new ExternalDetailsDecl();
-  }
- 
-    "using"
+  adapterName = Identifier()
     {
-    	adapterName = getAdapterName();
+	  return adapterName;
     }
-
-    {
-      properties = getConfiguration();
-    }
-
-    {
-    	  edd = new ExternalDetailsDecl();
-		  edd.setAdapter(adapterName);
-   		  edd.setProperties(properties);
-    } 
- 
-  {
-    return edd;
-  }
 }
 
-FeedDetailsDecl FeedDatasetDeclaration() throws ParseException :
-{
-    FeedDetailsDecl fdd = null;
-    String adapterName = null;
-    Map < String, String > properties;
-	Pair<Identifier,Identifier> nameComponents;
-	List<String> primaryKeyFields = new ArrayList<String>();
-    Identifier nodeGroupName=null;
-    FunctionSignature appliedFunction=null;
-	String dataverse;
-	String functionName;
-	String libraryName;
-	int arity;
-	Triple<String,String,String> functionCallComponents = null;
-}
-{
-   "using"
-    {
-    	adapterName = getAdapterName();
-    }
-
-    {
-      properties = getConfiguration();
-    }
-  
-  ("apply" "function" 
-   (
-      ( functionCallComponents = getFunctionCallComponents() )
-       {
-         dataverse = functionCallComponents.first == null ? defaultDataverse : functionCallComponents.first;
-         libraryName = functionCallComponents.second;
-         functionName = functionCallComponents.third;
-         if(libraryName != null){
-          functionName = libraryName + ":" + functionName;
-         }
-       }
-     )
-     
-  ("@" <INTEGER_LITERAL> 
-     {
-        arity = Integer.parseInt(token.image);
-     }
-  )
-  
-  {
-    appliedFunction = new FunctionSignature(dataverse, functionName, arity);
-  }   
-  )?
-  
-  (
-    {
-  	  primaryKeyFields  = getPrimaryKeyFields();
-    }
-  )
-  
-  (
-  "on" < IDENTIFIER >
-  {
-    	nodeGroupName = new Identifier(token.image);
-  }
-  )?
-  
-  {
-    fdd = new FeedDetailsDecl(adapterName, properties, appliedFunction, nodeGroupName, primaryKeyFields);
-    return fdd;
-  }
-}
-
-List<String> getPrimaryKeyFields()  throws ParseException :
-{
-	List<String> primaryKeyFields = new ArrayList<String>();
-}
-{
-
-  "primary" "key"
-  < IDENTIFIER >
-  {
-    	 primaryKeyFields.add(token.image);
-  }
-  (
-    "," < IDENTIFIER >
-    {
-    	 primaryKeyFields.add(token.image);
-    }
-  )*
-  {
-   	return primaryKeyFields;
-  }
-  
-}
-
-
-
-
-
-ControlFeedStatement ControlFeedDeclaration(ControlFeedStatement.OperationType operationType) throws ParseException :
+Statement FeedStatement() throws ParseException:
 {
   Pair<Identifier,Identifier> nameComponents = null;
+  Map<String,String> configuration = null;
+  Statement stmt = null;
 }
 {
+  (
+    "begin" "feed" nameComponents = QualifiedName()
+      {
+        stmt = new BeginFeedStatement(nameComponents.first, nameComponents.second, getVarCounter());
+      }
+    | "suspend" "feed" nameComponents = QualifiedName()
+      {
+        stmt = new ControlFeedStatement(ControlFeedStatement.OperationType.SUSPEND, nameComponents.first, nameComponents.second);
+      }
+    | "resume" "feed" nameComponents = QualifiedName()
+      {
+        stmt = new ControlFeedStatement(ControlFeedStatement.OperationType.RESUME, nameComponents.first, nameComponents.second);
+      }
+    | "end" "feed" nameComponents = QualifiedName()
+      {
+        stmt = new ControlFeedStatement(ControlFeedStatement.OperationType.END, nameComponents.first, nameComponents.second);
+      }
+    | "alter" "feed" nameComponents = QualifiedName() "set" configuration = Configuration()
+      {
+        stmt = new ControlFeedStatement(ControlFeedStatement.OperationType.ALTER, nameComponents.first, nameComponents.second, configuration);
+      }
+  )
     {
-    nameComponents = getDotSeparatedPair();
-    return new ControlFeedStatement(operationType, nameComponents.first, nameComponents.second);
+      return stmt;
     }
 }
 
-
-ControlFeedStatement AlterFeedDeclaration() throws ParseException :
-{
-    Pair<Identifier,Identifier> nameComponents = null;
-    Map < String, String > configuration = new HashMap < String, String > ();
-}
-{
-   {
-    nameComponents = getDotSeparatedPair();
-   }
-
-   "set"
-   { 
-   configuration = getConfiguration();
-   }
-  
-  {
-    return new ControlFeedStatement(ControlFeedStatement.OperationType.ALTER, nameComponents.first, nameComponents.second, configuration);
-  }
-}
-
-Map<String,String> getConfiguration()  throws ParseException :
+Map<String,String> Configuration()  throws ParseException :
 {
 	Map<String,String> configuration = new LinkedHashMap<String,String>();
-	String key;
-	String value;
+	Pair<String, String> keyValuePair = null;
 }
 {
-
-<LEFTPAREN>
-    (
-      (
-        <LEFTPAREN>
-        (
-          <STRING_LITERAL>
-          {
-            key = removeQuotesAndEscapes(token.image);
-          }
-          "=" <STRING_LITERAL>
-          {
-            value = removeQuotesAndEscapes(token.image);
-          }
-        )
-        <RIGHTPAREN>
-        {
-          configuration.put(key, value);
-        }
-      )
-      (
-        "," <LEFTPAREN>
-        (
-          <STRING_LITERAL>
-          {
-            key = removeQuotesAndEscapes(token.image);
-          }
-          "=" <STRING_LITERAL>
-          {
-            value = removeQuotesAndEscapes(token.image);
-          }
-        )
-        <RIGHTPAREN>
-        {
-          configuration.put(key, value);
-        }
-      )*
-    )?
-    <RIGHTPAREN>
-     {
-     	return configuration;
-     }
-}
-
-void initProperties(Map<String,String> properties)  throws ParseException :
-{
-	String key;
-	String value;
-}
-{
-    (
-      <LEFTPAREN>
-        (
-          <IDENTIFIER>
-          {
-            key = (new Identifier(token.image)).getValue();
-          }
-          "=" 
-          (
-            (<STRING_LITERAL>
-             {
-              value = removeQuotesAndEscapes(token.image);
-             }
-            ) |
-            (<INTEGER_LITERAL>
-             {
-             try{
-              value = "" + Long.valueOf(token.image);
-              } catch (NumberFormatException nfe){
-                  throw new ParseException("inapproriate value: " + token.image); 
-              }
-             } 
-            )
-          )
-        {
-          properties.put(key.toUpperCase(), value);
-        }
-       ( 
-        "," 
-        (
-          <IDENTIFIER>
-          {
-            key = (new Identifier(token.image)).getValue();
-          }
-          "=" 
-          (
-           (<STRING_LITERAL>
-            {
-              value = removeQuotesAndEscapes(token.image);
-            }
-           ) |
-           (<INTEGER_LITERAL>
-            {
-              try{
-                value = "" + Long.valueOf(token.image);
-              } catch (NumberFormatException nfe){
-              	throw new ParseException("inapproriate value: " + token.image); 
-              }
-            } 
-           )
-          ) 
-        )
-        {
-          properties.put(key.toUpperCase(), value);
-        }
-        
-       )*
-      )
-       <RIGHTPAREN>
-    )?
-}
-
-
-
-NodegroupDecl NodegroupDeclaration() throws ParseException :
-{
-  Identifier name = null;
-  List < Identifier > ncNames = new ArrayList < Identifier > ();
-  boolean ifNotExists = false;
-}
-{
-  < IDENTIFIER >
-  {
-    name = new Identifier(token.image);
-  }
-  (
-    "if not exists"
-    { 
-      ifNotExists = true;
-    }
-  )?
-  "on" < IDENTIFIER >
-  {
-    ncNames.add(new Identifier(token.image));
-  }
-  (
-    "," < IDENTIFIER >
+  <LEFTPAREN> ( keyValuePair = KeyValuePair()
     {
-      ncNames.add(new Identifier(token.image));
+      configuration.put(keyValuePair.first, keyValuePair.second);
     }
-  )*
-  ";"
-  {
-    return new NodegroupDecl(name, ncNames, ifNotExists);
-  }
-}
-
-
-TypeDecl TypeDeclaration(boolean dgen, String hint) throws ParseException:
-{
-  Identifier dataverse;
-  Identifier ident;
-  TypeExpression typeExpr;
-  boolean ifNotExists = false;
-  Pair<Identifier,Identifier> nameComponents=null;	
-}
-{
-  {
-    nameComponents = getDotSeparatedPair();
-    dataverse = nameComponents.first;
-    ident = nameComponents.second; 
-  }
-  
-  (
-    "if not exists"
+  ( "," keyValuePair = KeyValuePair()
     {
-      ifNotExists = true;
+      configuration.put(keyValuePair.first, keyValuePair.second);
     }
-  )?
-  "as"
-  ( typeExpr = TypeExpr() )
-  (";")?
-  {
-    long numValues = -1;
-    String filename = null;
-    if (dgen) {       
-      String splits[] = hint.split(" +");
-      if (splits.length != 3) {
-        throw new ParseException("Expecting /*+ dgen <filename> <numberOfItems> */");
-      } 
-      filename = splits[1];
-      numValues = Long.parseLong(splits[2]);
+  )* )? <RIGHTPAREN>
+    {
+      return configuration;
+    }
+}
+
+Pair<String, String> KeyValuePair() throws ParseException:
+{
+  String key;
+  String value;
+}
+{
+  <LEFTPAREN> key = StringLiteral() "=" value = StringLiteral() <RIGHTPAREN>
+    {
+      return new Pair<String, String>(key, value);
     }  
-    TypeDataGen tddg = new TypeDataGen(dgen, filename, numValues);
-    return new TypeDecl(dataverse, ident, typeExpr, tddg, ifNotExists);
-  }
+}
+
+Map<String,String> Properties() throws ParseException:
+{
+  Map<String,String> properties = new HashMap<String,String>();
+  Pair<String, String> property;
+}
+{
+  ( <LEFTPAREN> property = Property()
+    {
+      properties.put(property.first, property.second);
+    }
+  ( "," property = Property() 
+    {
+      properties.put(property.first, property.second);
+    }
+  )* <RIGHTPAREN> )?
+    {
+      return properties;
+    }
+}
+
+Pair<String, String> Property() throws ParseException:
+{
+  String key;
+  String value;
+}
+{
+  key = Identifier() "=" ( value = StringLiteral() | <INTEGER_LITERAL>
+    {
+      try {
+        value = "" + Long.valueOf(token.image);
+      } catch (NumberFormatException nfe) {
+        throw new ParseException("inappropriate value: " + token.image); 
+      }
+    }
+  )
+    {
+      return new Pair<String, String>(key.toUpperCase(), value);
+    }  
 }
 
 TypeExpression TypeExpr() throws ParseException:
@@ -1191,11 +911,10 @@
         boolean nullable = false;
 }
 {
-      <IDENTIFIER>
+      fieldName = Identifier()
       	{
-	     Token t = getToken(0);
-	     fieldName = t.toString();	     	     
-         String hint = getHint(t);
+	     fieldName = token.image;	     	     
+         String hint = getHint(token);
          IRecordFieldDataGen rfdg = null;
          if (hint != null) { 
            String splits[] = hint.split(" +");
@@ -1247,14 +966,14 @@
 }
 
 TypeReferenceExpression TypeReference() throws ParseException:
-{}
 {
- <IDENTIFIER>
- {
-     Token t = getToken(0);
-     Identifier id = new Identifier(t.toString());
-     return new TypeReferenceExpression(id);
- }
+  String id = null;
+}
+{
+ id = Identifier()
+   {
+     return new TypeReferenceExpression(new Identifier(id));
+   }
 }
 
 OrderedListTypeDefinition OrderedListTypeDef() throws ParseException:
@@ -1284,68 +1003,91 @@
   }
 }
 
-Pair<Identifier,Identifier> getDotSeparatedPair() throws ParseException:
+
+Pair<Identifier,Identifier> FunctionOrTypeName() throws ParseException:
 {
- Identifier first = null;
- Identifier second = null;
+  Pair<Identifier,Identifier> name = null;
 }
 {
-  < IDENTIFIER >
+  name = QualifiedName()
+    {
+      if (name.first == null) {
+        name.first = new Identifier(defaultDataverse);
+      }
+      return name;
+    }
+}
+
+String Identifier() throws ParseException:
+{
+  String lit = null;
+}
+{
+  <IDENTIFIER>
+    {
+      return token.image;
+    }
+  | lit = StringLiteral()
+    {
+      return lit;
+    } 
+}
+
+String StringLiteral() throws ParseException:
+{
+}
+{
+  <STRING_LITERAL>
+    {
+      return removeQuotesAndEscapes(token.image);
+    }
+}
+
+Pair<Identifier,Identifier> QualifiedName() throws ParseException:
+{
+  String first = null;
+  String second = null;
+}
+{
+  first = Identifier() ("." second = Identifier())?
   {
-    first = new Identifier(token.image);
-  } 
-  ("." <IDENTIFIER>
-  {
-    second = new Identifier(token.image);
-  }
-  )?
-  
-  {
-   if(second == null){
-   	second = first;
-   	first = null;
-   } 
-   
-   return new Pair<Identifier,Identifier>(first,second);
+    Identifier id1 = null;
+    Identifier id2 = null;
+    if (second == null) {
+      id2 = new Identifier(first);
+    } else
+    {
+      id1 = new Identifier(first);
+      id2 = new Identifier(second);
+    }
+    return new Pair<Identifier,Identifier>(id1, id2);
   }  
 }  
   
-Triple<Identifier,Identifier,Identifier> getDotSeparatedTriple() throws ParseException:
+Triple<Identifier,Identifier,Identifier> DoubleQualifiedName() throws ParseException:
 {
- Identifier first = null;
- Identifier second = null;
- Identifier third = null;
+  String first = null;
+  String second = null;
+  String third = null;
 }
 {
-  < IDENTIFIER >
+  first = Identifier() "." second = Identifier() ("." third = Identifier())?
   {
-    first = new Identifier(token.image);
-  } 
-  "." <IDENTIFIER>
-  {
-    second = new Identifier(token.image);
-  }
-  (
-  "." <IDENTIFIER>
-  {
-    third = new Identifier(token.image);
-  }
-  )?
-  
-  {
-   if(third == null){
-   	third  = second;
-   	second = first;
-   	first = null;
-   } 
-   
-   return new Triple<Identifier,Identifier,Identifier>(first,second,third);
+    Identifier id1 = null;
+    Identifier id2 = null;
+    Identifier id3 = null;
+    if (third == null) {
+      id2 = new Identifier(first);
+      id3 = new Identifier(second);
+    } else {
+      id1 = new Identifier(first);
+      id2 = new Identifier(second);
+      id3 = new Identifier(third);
+    }
+    return new Triple<Identifier,Identifier,Identifier>(id1, id2, id3);
   }  
 }  
 
-
-  
-
 FunctionDecl FunctionDeclaration() throws ParseException:
 {
   FunctionDecl funcDecl;
@@ -1358,29 +1100,23 @@
   createNewScope();
 }
 {
-
-    <IDENTIFIER>
-	{
-	  Token t = getToken(0);
-	  functionName = t.toString();
-	}
-    <LEFTPAREN> (<VARIABLE>
+  "declare" "function" functionName = Identifier() <LEFTPAREN> (<VARIABLE>
     {
       var = new VarIdentifier();
-      var.setValue(getToken(0).toString());
+      var.setValue(token.image);
       paramList.add(var);
       getCurrentScope().addNewVarSymbolToScope(var);
       arity++;
     }
-    ("," <VARIABLE>
+  ("," <VARIABLE>
     {
       var = new VarIdentifier();
-      var.setValue(getToken(0).toString());
+      var.setValue(token.image);
       paramList.add(var);
       getCurrentScope().addNewVarSymbolToScope(var);
       arity++;
-    })*)? <RIGHTPAREN> "{" funcBody = Expression() "}"
-    (";")?
+    }
+  )*)? <RIGHTPAREN> "{" funcBody = Expression() "}"
     {
       signature = new FunctionSignature(defaultDataverse, functionName, arity);
       getCurrentScope().addFunctionDescriptor(signature, false);
@@ -1389,78 +1125,14 @@
     }
 }
 
-CreateFunctionStatement FunctionCreation() throws ParseException:
-{
-  CreateFunctionStatement cfs = null;
-  FunctionSignature signature;
-  String dataverse;
-  String functionName;
-  boolean ifNotExists = false;
-  List<VarIdentifier> paramList = new ArrayList<VarIdentifier>();
-  String functionBody;
-  VarIdentifier var = null;
-  createNewScope();
-  Expression functionBodyExpr;
-  Token beginPos;
-  Token endPos;
-  Pair<Identifier,Identifier> nameComponents=null;
-}
-{
-    {
-      nameComponents = getDotSeparatedPair();
-   	  dataverse = nameComponents.first != null ? nameComponents.first.getValue() : defaultDataverse;
-   	  functionName= nameComponents.second.getValue();
-	}
-	
-	(
-      "if not exists"
-       {
-         ifNotExists = true;
-       }
-    )?
-	
-    <LEFTPAREN> (<VARIABLE>
-    {
-      var = new VarIdentifier();
-      var.setValue(getToken(0).toString());
-      paramList.add(var);
-      getCurrentScope().addNewVarSymbolToScope(var);
-    }
-    ("," <VARIABLE>
-    {
-      var = new VarIdentifier();
-      var.setValue(getToken(0).toString());
-      paramList.add(var);
-      getCurrentScope().addNewVarSymbolToScope(var);
-    })*)? <RIGHTPAREN>  "{"
-          {
-          beginPos = getToken(0);
-          } 
-          functionBodyExpr = Expression() 
-          "}" 
-          {
-            endPos = getToken(0);
-            functionBody = extractFragment(beginPos.beginLine, beginPos.beginColumn, endPos.beginLine, endPos.beginColumn);
-          }
-          (";")?
-    {
-      signature = new FunctionSignature(dataverse, functionName, paramList.size());
-      getCurrentScope().addFunctionDescriptor(signature, false);
-      cfs = new CreateFunctionStatement(signature, paramList, functionBody, ifNotExists);
-      return cfs;
-    }
-}
 
-
-
-Query Query()throws ParseException:
+Query Query() throws ParseException:
 {
   Query query = new Query();
   Expression expr;
 }
 {
-    expr = Expression()
-    (";")?
+  expr = Expression()
     {
       query.setBody(expr);
       query.setVarCounter(getVarCounter());
@@ -1510,8 +1182,7 @@
   	    op.addOperand(operand);
 	    op.setCurrentop(true);  	      	     
   	  }  
-  	  Token t = getToken(0);
-      op.addOperator(t.toString());
+      op.addOperator(token.image);
 	}
 
 	operand = AndExpr()
@@ -1542,8 +1213,7 @@
   	    op.addOperand(operand);
 	    op.setCurrentop(true);  	      	     
   	  }  
-  	  Token t = getToken(0);
-      op.addOperator(t.toString());
+      op.addOperator(token.image);
 	}
 
 	operand = RelExpr()
@@ -1590,9 +1260,8 @@
   	      op.addOperand(operand, broadcast);
           op.setCurrentop(true);
           broadcast = false;
-  	    }   
-  	    Token t = getToken(0);
-        op.addOperator(t.toString());
+  	    }
+        op.addOperator(token.image);
 	  }
 	  
  	  operand = AddExpr()
@@ -1630,9 +1299,8 @@
   	    op = new OperatorExpr();
         op.addOperand(operand);  	    
         op.setCurrentop(true);        
-  	  } 
-  	  Token t = getToken(0);
-	  ((OperatorExpr)op).addOperator(t.toString());
+  	  }
+	  ((OperatorExpr)op).addOperator(token.image);
 	}
 
 	operand = MultExpr()
@@ -1660,9 +1328,8 @@
   	    op = new OperatorExpr();
         op.addOperand(operand);
         op.setCurrentop(true);          	    
-  	  } 
-  	  Token t = getToken(0);
-	  op.addOperator(t.toString());
+  	  }
+	  op.addOperator(token.image);
 	}
 	operand = UnionExpr()
 	{
@@ -1704,11 +1371,10 @@
 {
 	(( "+"|"-") 
 	{
-	  	uexpr = new UnaryExpr(); 
-		Token t = getToken(0);
-		if("+".equals(t.toString()))
+	  	uexpr = new UnaryExpr();
+		if("+".equals(token.image))
 			((UnaryExpr)uexpr).setSign(Sign.POSITIVE);
-		else if("-".equals(t.toString()))
+		else if("-".equals(token.image))
 			((UnaryExpr)uexpr).setSign(Sign.NEGATIVE);
 		else 
 			throw new ParseException();
@@ -1727,73 +1393,39 @@
 	}
 }
 
-Expression ValueExpr() throws ParseException:
-{
-  Expression expr;
-}
-{
-  expr = FieldOrIndexAccessor()
-  {
-    return expr;
-  }
-}
-
-
-Expression FieldOrIndexAccessor()throws ParseException:
+Expression ValueExpr()throws ParseException:
 {
   Expression expr = null;
   Identifier ident = null;
   AbstractAccessor fa = null;
   int index;
-
 }
 {
-	( expr = PrimaryExpr()
-
-	)
-
-
-	(
-	(
-	  	ident = Field()
+  expr = PrimaryExpr() ( ident = Field()
 	{
-		  if(fa == null)
-		  	fa = new FieldAccessor(expr, ident);
-		  else
-		  	fa = new FieldAccessor(fa, ident);
-	}
-	)
-	| (
-		index = Index()
-		{
-		  if(fa == null)
-		  	fa = new IndexAccessor(expr, index);
-		  else
-		  	fa = new IndexAccessor(fa, index);
-		}
-	) 
-	)*
-
-	
-  	{
- 	  return fa==null?expr:fa;
- 	}
+	  fa = (fa == null ? new FieldAccessor(expr, ident) 
+                       : new FieldAccessor(fa, ident));
+    }
+  | index = Index()
+    {
+      fa = (fa == null ? new IndexAccessor(expr, index)
+                       : new IndexAccessor(fa, index));
+     }
+  )*
+    {
+      return fa == null ? expr : fa;
+    }
 }
 
 Identifier Field() throws ParseException:
 {
-  Identifier ident = null;
-
+  String ident = null;
 }
 {
-  "." < IDENTIFIER >
-  	{
-    
-  	ident = new Identifier();
-	ident.setValue(getToken(0).toString());
-
-	  return ident;
-	}
+  "." ident = Identifier()
+    {
+      return new Identifier(ident);
+    }
 }
 
 int Index() throws ParseException:
@@ -1838,75 +1470,63 @@
   Expression expr = null;
 }
 {
-  //Literal | VariableRef | ListConstructor | RecordConstructor | FunctionCallExpr | DatasetAccessExpression | ParenthesizedExpression
-	(
-	  expr =Literal() 
-	   | expr = FunctionCallExpr()
-	   | expr = DatasetAccessExpression()
-	   | expr =VariableRef() 
-	   
+  ( LOOKAHEAD(2) 
+    expr = FunctionCallExpr()
+  | expr = Literal()
+  | expr = DatasetAccessExpression()
+  | expr = VariableRef() 
     {
       if(((VariableExpr)expr).getIsNewVar() == true)
-      	throw new ParseException("can't find variable " + ((VariableExpr)expr).getVar());
+        throw new ParseException("can't find variable " + ((VariableExpr)expr).getVar());
     }
-    	   | expr = ListConstructor()
-	   | expr = RecordConstructor()
-	   | expr = ParenthesizedExpression()
-	)
- 	{
- 	  return expr;
- 	}
+  | expr = ListConstructor()
+  | expr = RecordConstructor()
+  | expr = ParenthesizedExpression()
+  )
+    {
+      return expr;
+    }
 }
 
 Expression Literal() throws ParseException:
 {
-
   LiteralExpr lit = new LiteralExpr();
-  Token t;
+  String str = null;
 }
 {
-(
-      <STRING_LITERAL>
-	{
-	  t= getToken(0);
-	  lit.setValue( new StringLiteral(removeQuotesAndEscapes(t.image)));
-	}
-    
- 	 | <INTEGER_LITERAL>
+  ( str = StringLiteral()
     {
-      t= getToken(0);
-	  try {
-	      lit.setValue(new IntegerLiteral(new Integer(t.image)));
-	  } catch(NumberFormatException ex) {
-	      lit.setValue(new LongIntegerLiteral(new Long(t.image)));
-	  }
-	}
-     | < FLOAT_LITERAL >
+      lit.setValue(new StringLiteral(str));
+    }
+  | <INTEGER_LITERAL>
     {
-      t= getToken(0);
-      lit.setValue(new FloatLiteral(new Float(t.image)));
-    }     
-	 | < DOUBLE_LITERAL >
+      try {
+        lit.setValue(new IntegerLiteral(new Integer(token.image)));
+      } catch(NumberFormatException ex) {
+        lit.setValue(new LongIntegerLiteral(new Long(token.image)));
+      }
+    }
+  | <FLOAT_LITERAL>
     {
-      t= getToken(0);
-	  lit.setValue(new DoubleLiteral(new Double(t.image)));
-	}	  
- 	 | <NULL>
-	{
-	  t= getToken(0);
-	  lit.setValue(NullLiteral.INSTANCE);
-	}
-   	 | <TRUE>
-	{
-	  t= getToken(0);
-	  lit.setValue(TrueLiteral.INSTANCE);
-	}   	 
-   	 | <FALSE>
-	{
-	  t= getToken(0);
-	  lit.setValue(FalseLiteral.INSTANCE);
-	}
-)
+      lit.setValue(new FloatLiteral(new Float(token.image)));
+    }
+  | <DOUBLE_LITERAL>
+    {
+      lit.setValue(new DoubleLiteral(new Double(token.image)));
+    }
+  | <NULL>
+    {
+      lit.setValue(NullLiteral.INSTANCE);
+    }
+  | <TRUE>
+    {
+      lit.setValue(TrueLiteral.INSTANCE);
+    }      
+  | <FALSE>
+    {
+      lit.setValue(FalseLiteral.INSTANCE);
+    }
+  )
     {
       return lit;
     }
@@ -1917,13 +1537,11 @@
 {
 	VariableExpr varExp = new VariableExpr();
 	VarIdentifier var = new VarIdentifier();
-	Token t;
 }
 {
       <VARIABLE>
     {
-     t = getToken(0);//get current token
-     String varName = t.toString(); 
+     String varName = token.image; 
      Identifier ident = lookupSymbol(varName);
      if (isInForbiddenScopes(varName)) {
        throw new ParseException("Inside limit clauses, it is disallowed to reference a variable having the same name as any variable bound in the same scope as the limit clause.");
@@ -1934,7 +1552,7 @@
      } else {
        varExp.setVar(var);     
      }
-     var.setValue(t.toString());        
+     var.setValue(varName);        
      return varExp;
     }
 }
@@ -1944,18 +1562,16 @@
 {
 	VariableExpr varExp = new VariableExpr();
 	VarIdentifier var = new VarIdentifier();
-	Token t;
 }
 {
       <VARIABLE>
     {
-     t = getToken(0);//get current token
-     Identifier ident = lookupSymbol(t.toString());
+     Identifier ident = lookupSymbol(token.image);
      if(ident != null) { // exist such ident
        varExp.setIsNewVar(false);
      }  
      varExp.setVar(var);     
-     var.setValue(t.toString());        
+     var.setValue(token.image);        
      return varExp;
     }
 }
@@ -2060,51 +1676,42 @@
   List<Expression> argList = new ArrayList<Expression>();
   Expression tmp;
   int arity = 0;
-  String funcName=null;
-  String dataverse=null;
-  String libraryName=null;
-  String hint=null;
-  String id1=null;
-  String id2=null;
-  String first = null;
-  String second = null;
-  String third = null;
-  Triple<String,String,String> functionCallComponents = null;
+  Pair<Identifier,Identifier> funcId = null;
+  String funcName;
+  String dataverse;
+  String hint = null;
+  String id1 = null;
+  String id2 = null;
 }
 {  
-    (
-      ( functionCallComponents = getFunctionCallComponents() )
-       {
-         dataverse = functionCallComponents.first == null ? defaultDataverse : functionCallComponents.first;
-         libraryName = functionCallComponents.second;
-         funcName = functionCallComponents.third;
-         if(libraryName != null){
-          funcName = libraryName + ":" + funcName;
-         }
-       } 
-    )
-
+  funcId = FunctionOrTypeName()
     {
-       hint = getHint(token);
+      dataverse = funcId.first.getValue();
+      funcName = funcId.second.getValue();
+      hint = getHint(token);
     }
-     <LEFTPAREN> (tmp = Expression()
-     {
-       argList.add(tmp);
-       arity ++;
-     } ("," tmp = Expression() { argList.add(tmp); arity++; })*)? <RIGHTPAREN>
-
-     {
-       FunctionSignature signature = lookupFunctionSignature(dataverse, funcName, arity);
-             if(signature == null)
-             {
-                signature = new FunctionSignature(dataverse, funcName, arity);
-             }
-       callExpr = new CallExpr(signature,argList);
-       if (hint != null && hint.startsWith(INDEXED_NESTED_LOOP_JOIN_HINT)) {
-          callExpr.addHint(IndexedNLJoinExpressionAnnotation.INSTANCE);
-        }
-       return callExpr;
-     }
+  <LEFTPAREN> (tmp = Expression()
+    {
+      argList.add(tmp);
+      arity ++;
+    }
+  ("," tmp = Expression()
+    {
+      argList.add(tmp);
+      arity++;
+    }
+  )*)? <RIGHTPAREN>
+    {
+      FunctionSignature signature = lookupFunctionSignature(dataverse, funcName, arity);
+      if (signature == null) {
+        signature = new FunctionSignature(dataverse, funcName, arity);
+      }
+      callExpr = new CallExpr(signature,argList);
+      if (hint != null && hint.startsWith(INDEXED_NESTED_LOOP_JOIN_HINT)) {
+        callExpr.addHint(IndexedNLJoinExpressionAnnotation.INSTANCE);
+      }
+      return callExpr;
+    }
 }
 
 Triple<String,String,String> getFunctionCallComponents() throws ParseException:
@@ -2150,27 +1757,45 @@
   List<Expression> argList = new ArrayList<Expression>();
   String funcName;
   String dataverse;
+  String arg1 = null;
+  String arg2 = null;
   LiteralExpr ds;
-  LiteralExpr dvds;
   Expression nameArg;
   int arity = 0;
 }
 {  
-    <DATASET> {dataverse = MetadataConstants.METADATA_DATAVERSE_NAME; funcName = getToken(0).toString();}
-    (
-    (<IDENTIFIER> {ds = new LiteralExpr(); ds.setValue( new StringLiteral(token.image) ); argList.add(ds); arity ++;} ("." <IDENTIFIER> { dvds = new LiteralExpr(); dvds.setValue(new StringLiteral(ds.getValue()+"."+token.image)); argList.remove(0); argList.add(dvds);})? ) |  
-    (<LEFTPAREN> nameArg = Expression() {argList.add(nameArg); arity ++;} ("," nameArg = Expression() { argList.add(nameArg); arity++; })* <RIGHTPAREN>)
-    )  
-     
-     {
-       FunctionSignature signature = lookupFunctionSignature(dataverse, funcName.toString(), arity);
-       if(signature == null)
-             {
-                signature = new FunctionSignature(dataverse, funcName.toString(), arity);
-             }
-       callExpr = new CallExpr(signature,argList);
-       return callExpr;
+  <DATASET>
+    {
+      dataverse = MetadataConstants.METADATA_DATAVERSE_NAME;
+      funcName = token.image;
+    }
+  ( ( arg1 = Identifier() ( "." arg2 = Identifier() )? ) 
+    {
+      String name = arg2 == null ? arg1 : arg1 + "." + arg2;
+      ds = new LiteralExpr();
+      ds.setValue( new StringLiteral(name) );
+      argList.add(ds);
+      arity ++;
+    }
+  | ( <LEFTPAREN> nameArg = Expression()
+    {
+      argList.add(nameArg);
+      arity ++;
+    }
+  ( "," nameArg = Expression()
+    {
+      argList.add(nameArg);
+      arity++;
+    }
+  )* <RIGHTPAREN> ) )  
+   {
+     FunctionSignature signature = lookupFunctionSignature(dataverse, funcName, arity);
+     if (signature == null) {
+       signature = new FunctionSignature(dataverse, funcName, arity);
      }
+     callExpr = new CallExpr(signature,argList);
+     return callExpr;
+   }
 }
 
 Expression ParenthesizedExpression() throws ParseException:
@@ -2235,7 +1860,6 @@
        | clause = GroupClause() 
        | clause = LimitClause()
        | clause = DistinctClause()
-       | clause = DieClause()
     )
     {
       return clause;
@@ -2469,21 +2093,6 @@
   }
 }
 
-DieClause DieClause() throws ParseException:
-{
-	DieClause lc = new DieClause();
-	Expression expr;
-	pushForbiddenScope(getCurrentScope());
-}
-{
-  "die" "after" expr = Expression()    { lc.setDieExpr(expr);    }
-  {
-    popForbiddenScope();   
-    return lc;
-  }
-}
-
-
 QuantifiedExpression QuantifiedExpression()throws ParseException:
 {
   QuantifiedExpression qc = new QuantifiedExpression();
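
The rewritten name productions above collapse dataverse-qualified and unqualified names into a single Pair convention: QualifiedName() leaves the first component null for an unqualified name, and FunctionOrTypeName() back-fills that null with defaultDataverse. The standalone Java sketch below (not parser code; the dotted-string input and helper names are invented for illustration) shows the same resolution in one step:

import java.util.AbstractMap.SimpleEntry;
import java.util.Map.Entry;

// Illustration only: demonstrates the (dataverse, name) convention produced by
// QualifiedName() and FunctionOrTypeName(). The dotted-string input and the
// defaultDataverse argument are assumptions made for this sketch.
final class QualifiedNameSketch {
    static Entry<String, String> resolve(String dotted, String defaultDataverse) {
        int dot = dotted.indexOf('.');
        if (dot < 0) {
            // unqualified name: the dataverse component falls back to the default
            return new SimpleEntry<String, String>(defaultDataverse, dotted);
        }
        return new SimpleEntry<String, String>(dotted.substring(0, dot), dotted.substring(dot + 1));
    }

    public static void main(String[] args) {
        System.out.println(resolve("ds", "Default"));             // Default=ds
        System.out.println(resolve("MyDataverse.ds", "Default")); // MyDataverse=ds
    }
}
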
diff --git a/asterix-common/pom.xml b/asterix-common/pom.xml
index 5930ea5..91b89d4 100644
--- a/asterix-common/pom.xml
+++ b/asterix-common/pom.xml
@@ -20,6 +20,46 @@
 				</configuration>
 			</plugin>
 			<plugin>
+				<groupId>org.jvnet.jaxb2.maven2</groupId>
+				<artifactId>maven-jaxb2-plugin</artifactId>
+				<executions>
+					<execution>
+						<id>configuration</id>
+						<goals>
+							<goal>generate</goal>
+						</goals>
+						<configuration>
+							<args>
+								<arg>-Xsetters</arg>
+								<arg>-Xvalue-constructor</arg>
+							</args>
+							<plugins>
+								<plugin>
+									<groupId>org.jvnet.jaxb2_commons</groupId>
+									<artifactId>jaxb2-basics</artifactId>
+									<version>0.6.2</version>
+								</plugin>
+								<plugin>
+									<groupId>org.jvnet.jaxb2_commons</groupId>
+									<artifactId>jaxb2-value-constructor</artifactId>
+									<version>3.0</version>
+								</plugin>
+							</plugins>
+							<schemaDirectory>src/main/resources/schema</schemaDirectory>
+							<schemaIncludes>
+								<include>asterix-conf.xsd</include>
+							</schemaIncludes>
+							<generatePackage>edu.uci.ics.asterix.common.configuration</generatePackage>
+							<bindingDirectory>src/main/resources/schema</bindingDirectory>
+							<bindingIncludes>
+								<bindingInclude>jaxb-bindings.xjb</bindingInclude>
+							</bindingIncludes>
+							<generateDirectory>${project.build.directory}/generated-sources/configuration</generateDirectory>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
 				<groupId>org.apache.maven.plugins</groupId>
 				<artifactId>maven-jar-plugin</artifactId>
 				<version>2.2</version>
@@ -59,6 +99,16 @@
 			<artifactId>hyracks-dataflow-std</artifactId>
 		</dependency>
 		<dependency>
+			<groupId>commons-io</groupId>
+			<artifactId>commons-io</artifactId>
+			<version>1.4</version>
+		</dependency>
+		<dependency>
+			<groupId>commons-httpclient</groupId>
+			<artifactId>commons-httpclient</artifactId>
+			<version>3.0.1</version>
+		</dependency>
+		<dependency>
 			<groupId>edu.uci.ics.asterix</groupId>
 			<artifactId>asterix-transactions</artifactId>
 			<version>0.0.6-SNAPSHOT</version>
@@ -82,6 +132,18 @@
 			<groupId>edu.uci.ics.hyracks</groupId>
 			<artifactId>hyracks-storage-am-lsm-common</artifactId>
 		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.asterix</groupId>
+			<artifactId>asterix-test-framework</artifactId>
+			<version>0.0.6-SNAPSHOT</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.8.1</version>
+			<scope>test</scope>
+		</dependency>
 	</dependencies>
 
 </project>
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfo.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfo.java
new file mode 100644
index 0000000..06a1c1f
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfo.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.common.api;
+
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
+import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
+import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
+import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
+import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
+import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
+import edu.uci.ics.asterix.common.context.AsterixRuntimeComponentsProvider;
+import edu.uci.ics.asterix.common.dataflow.IAsterixApplicationContextInfo;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+/*
+ * Acts as a holder class for the IndexRegistryProvider and AsterixStorageManager
+ * instances that are accessed from the NCs. In addition, an instance of ICCApplicationContext
+ * is stored for access by the CC.
+ */
+public class AsterixAppContextInfo implements IAsterixApplicationContextInfo, IAsterixPropertiesProvider {
+
+    private static AsterixAppContextInfo INSTANCE;
+
+    private final ICCApplicationContext appCtx;
+
+    private AsterixCompilerProperties compilerProperties;
+    private AsterixExternalProperties externalProperties;
+    private AsterixMetadataProperties metadataProperties;
+    private AsterixStorageProperties storageProperties;
+    private AsterixTransactionProperties txnProperties;
+
+    public static void initialize(ICCApplicationContext ccAppCtx) throws AsterixException {
+        if (INSTANCE == null) {
+            INSTANCE = new AsterixAppContextInfo(ccAppCtx);
+        }
+        AsterixPropertiesAccessor propertiesAccessor = new AsterixPropertiesAccessor();
+        INSTANCE.compilerProperties = new AsterixCompilerProperties(propertiesAccessor);
+        INSTANCE.externalProperties = new AsterixExternalProperties(propertiesAccessor);
+        INSTANCE.metadataProperties = new AsterixMetadataProperties(propertiesAccessor);
+        INSTANCE.storageProperties = new AsterixStorageProperties(propertiesAccessor);
+        INSTANCE.txnProperties = new AsterixTransactionProperties(propertiesAccessor);
+
+        Logger.getLogger("edu.uci.ics").setLevel(INSTANCE.externalProperties.getLogLevel());
+    }
+
+    private AsterixAppContextInfo(ICCApplicationContext ccAppCtx) {
+        this.appCtx = ccAppCtx;
+    }
+
+    public static AsterixAppContextInfo getInstance() {
+        return INSTANCE;
+    }
+
+    @Override
+    public IStorageManagerInterface getStorageManagerInterface() {
+        return AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER;
+    }
+
+    @Override
+    public ICCApplicationContext getCCApplicationContext() {
+        return appCtx;
+    }
+
+    @Override
+    public IIndexLifecycleManagerProvider getIndexLifecycleManagerProvider() {
+        return AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER;
+    }
+
+    @Override
+    public AsterixStorageProperties getStorageProperties() {
+        return storageProperties;
+    }
+
+    @Override
+    public AsterixTransactionProperties getTransactionProperties() {
+        return txnProperties;
+    }
+
+    @Override
+    public AsterixCompilerProperties getCompilerProperties() {
+        return compilerProperties;
+    }
+
+    @Override
+    public AsterixMetadataProperties getMetadataProperties() {
+        return metadataProperties;
+    }
+
+    @Override
+    public AsterixExternalProperties getExternalProperties() {
+        return externalProperties;
+    }
+}
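
The new AsterixAppContextInfo singleton carries the typed property groups and must be initialized with the ICCApplicationContext before any getter is used. A minimal usage sketch, assuming ccAppCtx is handed in by the CC application's startup code (the surrounding class and method names are invented for illustration):

import edu.uci.ics.asterix.common.api.AsterixAppContextInfo;
import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.hyracks.api.application.ICCApplicationContext;

// Usage sketch only; ccAppCtx is assumed to come from the CC application's start hook.
final class AppContextUsageSketch {
    static void bootstrap(ICCApplicationContext ccAppCtx) throws AsterixException {
        // Creates the singleton (if needed) and loads all typed property groups.
        AsterixAppContextInfo.initialize(ccAppCtx);

        // Any CC-side component can then read configuration through the singleton.
        AsterixStorageProperties storage = AsterixAppContextInfo.getInstance().getStorageProperties();
        System.out.println("buffer cache page size = " + storage.getBufferCachePageSize());
    }
}
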
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfoImpl.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfoImpl.java
deleted file mode 100644
index 0f1de56..0000000
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfoImpl.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.common.api;
-
-import edu.uci.ics.asterix.common.context.AsterixRuntimeComponentsProvider;
-import edu.uci.ics.asterix.common.dataflow.IAsterixApplicationContextInfo;
-import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-
-/*
- * Acts as an holder class for IndexRegistryProvider, AsterixStorageManager
- * instances that are accessed from the NCs. In addition an instance of ICCApplicationContext 
- * is stored for access by the CC.
- */
-public class AsterixAppContextInfoImpl implements IAsterixApplicationContextInfo {
-
-    private static AsterixAppContextInfoImpl INSTANCE;
-
-    private final ICCApplicationContext appCtx;
-
-    public static void initialize(ICCApplicationContext ccAppCtx) {
-        if (INSTANCE == null) {
-            INSTANCE = new AsterixAppContextInfoImpl(ccAppCtx);
-        }
-    }
-
-    private AsterixAppContextInfoImpl(ICCApplicationContext ccAppCtx) {
-        this.appCtx = ccAppCtx;
-    }
-
-    public static IAsterixApplicationContextInfo getInstance() {
-        return INSTANCE;
-    }
-
-    @Override
-    public IStorageManagerInterface getStorageManagerInterface() {
-        return AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER;
-    }
-
-    @Override
-    public ICCApplicationContext getCCApplicationContext() {
-        return appCtx;
-    }
-
-    @Override
-    public IIndexLifecycleManagerProvider getIndexLifecycleManagerProvider() {
-        return AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER;
-    }
-}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AbstractAsterixProperties.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AbstractAsterixProperties.java
new file mode 100644
index 0000000..4968f40
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AbstractAsterixProperties.java
@@ -0,0 +1,9 @@
+package edu.uci.ics.asterix.common.config;
+
+public abstract class AbstractAsterixProperties {
+    protected final AsterixPropertiesAccessor accessor;
+
+    public AbstractAsterixProperties(AsterixPropertiesAccessor accessor) {
+        this.accessor = accessor;
+    }
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixCompilerProperties.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixCompilerProperties.java
new file mode 100644
index 0000000..b1767b9
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixCompilerProperties.java
@@ -0,0 +1,31 @@
+package edu.uci.ics.asterix.common.config;
+
+public class AsterixCompilerProperties extends AbstractAsterixProperties {
+    private static final String COMPILER_SORTMEMORY_KEY = "compiler.sortmemory";
+    private static final long COMPILER_SORTMEMORY_DEFAULT = (512 << 20); // 512MB
+
+    private static final String COMPILER_JOINMEMORY_KEY = "compiler.joinmemory";
+    private static final long COMPILER_JOINMEMORY_DEFAULT = (512 << 20); // 512MB
+
+    private static final String COMPILER_FRAMESIZE_KEY = "compiler.framesize";
+    private static int COMPILER_FRAMESIZE_DEFAULT = (32 << 10); // 32KB
+
+    public AsterixCompilerProperties(AsterixPropertiesAccessor accessor) {
+        super(accessor);
+    }
+
+    public long getSortMemorySize() {
+        return accessor.getProperty(COMPILER_SORTMEMORY_KEY, COMPILER_SORTMEMORY_DEFAULT,
+                PropertyInterpreters.getLongPropertyInterpreter());
+    }
+
+    public long getJoinMemorySize() {
+        return accessor.getProperty(COMPILER_JOINMEMORY_KEY, COMPILER_JOINMEMORY_DEFAULT,
+                PropertyInterpreters.getLongPropertyInterpreter());
+    }
+
+    public int getFrameSize() {
+        return accessor.getProperty(COMPILER_FRAMESIZE_KEY, COMPILER_FRAMESIZE_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixExternalProperties.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixExternalProperties.java
new file mode 100644
index 0000000..cf38932
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixExternalProperties.java
@@ -0,0 +1,34 @@
+package edu.uci.ics.asterix.common.config;
+
+import java.util.logging.Level;
+
+public class AsterixExternalProperties extends AbstractAsterixProperties {
+
+    private static final String EXTERNAL_WEBPORT_KEY = "web.port";
+    private static int EXTERNAL_WEBPORT_DEFAULT = 19001;
+
+    private static final String EXTERNAL_LOGLEVEL_KEY = "log.level";
+    private static Level EXTERNAL_LOGLEVEL_DEFAULT = Level.INFO;
+
+    private static final String EXTERNAL_APISERVER_KEY = "api.port";
+    private static int EXTERNAL_APISERVER_DEFAULT = 19101;
+
+    public AsterixExternalProperties(AsterixPropertiesAccessor accessor) {
+        super(accessor);
+    }
+
+    public int getWebInterfacePort() {
+        return accessor.getProperty(EXTERNAL_WEBPORT_KEY, EXTERNAL_WEBPORT_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+    public int getAPIServerPort() {
+        return accessor.getProperty(EXTERNAL_APISERVER_KEY, EXTERNAL_APISERVER_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+    public Level getLogLevel() {
+        return accessor.getProperty(EXTERNAL_LOGLEVEL_KEY, EXTERNAL_LOGLEVEL_DEFAULT,
+                PropertyInterpreters.getLevelPropertyInterpreter());
+    }
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixMetadataProperties.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixMetadataProperties.java
new file mode 100644
index 0000000..6d47e78
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixMetadataProperties.java
@@ -0,0 +1,28 @@
+package edu.uci.ics.asterix.common.config;
+
+import java.util.Map;
+import java.util.Set;
+
+public class AsterixMetadataProperties extends AbstractAsterixProperties {
+
+    public AsterixMetadataProperties(AsterixPropertiesAccessor accessor) {
+        super(accessor);
+    }
+
+    public String getMetadataNodeName() {
+        return accessor.getMetadataNodeName();
+    }
+
+    public String getMetadataStore() {
+        return accessor.getMetadataStore();
+    }
+
+    public Map<String, String[]> getStores() {
+        return accessor.getStores();
+    }
+
+    public Set<String> getNodeNames() {
+        return accessor.getNodeNames();
+    }
+
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixPropertiesAccessor.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixPropertiesAccessor.java
new file mode 100644
index 0000000..7b2f2a6
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixPropertiesAccessor.java
@@ -0,0 +1,105 @@
+package edu.uci.ics.asterix.common.config;
+
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Unmarshaller;
+
+import edu.uci.ics.asterix.common.configuration.AsterixConfiguration;
+import edu.uci.ics.asterix.common.configuration.Property;
+import edu.uci.ics.asterix.common.configuration.Store;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+
+public class AsterixPropertiesAccessor {
+    private static final Logger LOGGER = Logger.getLogger(AsterixPropertiesAccessor.class.getName());
+
+    private final String metadataNodeName;
+    private final Set<String> nodeNames;
+    private final Map<String, String[]> stores;
+    private final Map<String, Property> asterixConfigurationParams;
+
+    public AsterixPropertiesAccessor() throws AsterixException {
+        String fileName = System.getProperty(GlobalConfig.CONFIG_FILE_PROPERTY);
+        if (fileName == null) {
+            fileName = GlobalConfig.DEFAULT_CONFIG_FILE_NAME;
+        }
+        InputStream is = this.getClass().getClassLoader().getResourceAsStream(fileName);
+        if (is == null) {
+            try {
+                fileName = GlobalConfig.DEFAULT_CONFIG_FILE_NAME;
+                is = new FileInputStream(fileName);
+            } catch (FileNotFoundException fnf) {
+                throw new AsterixException("Could not find configuration file " + fileName);
+            }
+        }
+
+        AsterixConfiguration asterixConfiguration = null;
+        try {
+            JAXBContext ctx = JAXBContext.newInstance(AsterixConfiguration.class);
+            Unmarshaller unmarshaller = ctx.createUnmarshaller();
+            asterixConfiguration = (AsterixConfiguration) unmarshaller.unmarshal(is);
+        } catch (JAXBException e) {
+            throw new AsterixException("Failed to read configuration file " + fileName);
+        }
+        metadataNodeName = asterixConfiguration.getMetadataNode();
+        stores = new HashMap<String, String[]>();
+        List<Store> configuredStores = asterixConfiguration.getStore();
+        nodeNames = new HashSet<String>();
+        for (Store store : configuredStores) {
+            String trimmedStoreDirs = store.getStoreDirs().trim();
+            stores.put(store.getNcId(), trimmedStoreDirs.split(","));
+            nodeNames.add(store.getNcId());
+        }
+        asterixConfigurationParams = new HashMap<String, Property>();
+        for (Property p : asterixConfiguration.getProperty()) {
+            asterixConfigurationParams.put(p.getName(), p);
+        }
+    }
+
+    public String getMetadataNodeName() {
+        return metadataNodeName;
+    }
+
+    public String getMetadataStore() {
+        return stores.get(metadataNodeName)[0];
+    }
+
+    public Map<String, String[]> getStores() {
+        return stores;
+    }
+
+    public Set<String> getNodeNames() {
+        return nodeNames;
+    }
+
+    public <T> T getProperty(String property, T defaultValue, IPropertyInterpreter<T> interpreter) {
+        Property p = asterixConfigurationParams.get(property);
+        if (p == null) {
+            return defaultValue;
+        }
+
+        try {
+            return interpreter.interpret(p);
+        } catch (IllegalArgumentException e) {
+            logConfigurationError(p, defaultValue);
+            throw e;
+        }
+    }
+
+    private <T> void logConfigurationError(Property p, T defaultValue) {
+        if (LOGGER.isLoggable(Level.SEVERE)) {
+            LOGGER.severe("Invalid property value '" + p.getValue() + "' for property '" + p.getName()
+                    + "'.\n See the description: \n" + p.getDescription() + "\nDefault = " + defaultValue);
+        }
+    }
+}
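
AsterixPropertiesAccessor looks for the configuration file named by the AsterixConfigFileName system property (defaulting to asterix-configuration.xml), first on the classpath and then in the working directory, and exposes the parsed <store> and <property> entries. A small usage sketch, assuming such a file is reachable at runtime:

import java.util.Arrays;
import java.util.Map;

import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
import edu.uci.ics.asterix.common.config.GlobalConfig;
import edu.uci.ics.asterix.common.exceptions.AsterixException;

// Usage sketch; setting the system property is optional and only overrides the default lookup.
final class PropertiesAccessorSketch {
    public static void main(String[] args) throws AsterixException {
        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, "asterix-configuration.xml");

        AsterixPropertiesAccessor accessor = new AsterixPropertiesAccessor();
        System.out.println("metadata node: " + accessor.getMetadataNodeName());
        for (Map.Entry<String, String[]> e : accessor.getStores().entrySet()) {
            System.out.println(e.getKey() + " -> " + Arrays.toString(e.getValue()));
        }
    }
}
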
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixStorageProperties.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixStorageProperties.java
new file mode 100644
index 0000000..d34e4ac
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixStorageProperties.java
@@ -0,0 +1,72 @@
+package edu.uci.ics.asterix.common.config;
+
+public class AsterixStorageProperties extends AbstractAsterixProperties {
+
+    private static final String STORAGE_BUFFERCACHE_PAGESIZE_KEY = "storage.buffercache.pagesize";
+    private static int STORAGE_BUFFERCACHE_PAGESIZE_DEFAULT = (32 << 10); // 32KB
+
+    private static final String STORAGE_BUFFERCACHE_NUMPAGES_KEY = "storage.buffercache.numpages";
+    private static final int STORAGE_BUFFERCACHE_NUMPAGES_DEFAULT = 1024;
+
+    private static final String STORAGE_BUFFERCACHE_MAXOPENFILES_KEY = "storage.buffercache.maxopenfiles";
+    private static int STORAGE_BUFFERCACHE_MAXOPENFILES_DEFAULT = Integer.MAX_VALUE;
+
+    private static final String STORAGE_MEMORYCOMPONENT_PAGESIZE_KEY = "storage.memorycomponent.pagesize";
+    private static final int STORAGE_MEMORYCOMPONENT_PAGESIZE_DEFAULT = (32 << 10); // 32KB
+
+    private static final String STORAGE_MEMORYCOMPONENT_NUMPAGES_KEY = "storage.memorycomponent.numpages";
+    private static final int STORAGE_MEMORYCOMPONENT_NUMPAGES_DEFAULT = 4096; // ... so 128MB components
+
+    private static final String STORAGE_MEMORYCOMPONENT_GLOBALBUDGET_KEY = "storage.memorycomponent.globalbudget";
+    private static final long STORAGE_MEMORYCOMPONENT_GLOBALBUDGET_DEFAULT = (1 << 30); // 1GB
+
+    private static final String STORAGE_LSM_MERGETHRESHOLD_KEY = "storage.lsm.mergethreshold";
+    private static int STORAGE_LSM_MERGETHRESHOLD_DEFAULT = 3;
+
+    private static final String STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE_KEY = "storage.lsm.bloomfilter.falsepositiverate";
+    private static double STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE_DEFAULT = 0.01;
+
+    public AsterixStorageProperties(AsterixPropertiesAccessor accessor) {
+        super(accessor);
+    }
+
+    public int getBufferCachePageSize() {
+        return accessor.getProperty(STORAGE_BUFFERCACHE_PAGESIZE_KEY, STORAGE_BUFFERCACHE_PAGESIZE_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+    public int getBufferCacheNumPages() {
+        return accessor.getProperty(STORAGE_BUFFERCACHE_NUMPAGES_KEY, STORAGE_BUFFERCACHE_NUMPAGES_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+    public int getBufferCacheMaxOpenFiles() {
+        return accessor.getProperty(STORAGE_BUFFERCACHE_MAXOPENFILES_KEY, STORAGE_BUFFERCACHE_MAXOPENFILES_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+    public int getMemoryComponentPageSize() {
+        return accessor.getProperty(STORAGE_MEMORYCOMPONENT_PAGESIZE_KEY, STORAGE_MEMORYCOMPONENT_PAGESIZE_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+    public int getMemoryComponentNumPages() {
+        return accessor.getProperty(STORAGE_MEMORYCOMPONENT_NUMPAGES_KEY, STORAGE_MEMORYCOMPONENT_NUMPAGES_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+    public long getMemoryComponentGlobalBudget() {
+        return accessor.getProperty(STORAGE_MEMORYCOMPONENT_GLOBALBUDGET_KEY,
+                STORAGE_MEMORYCOMPONENT_GLOBALBUDGET_DEFAULT, PropertyInterpreters.getLongPropertyInterpreter());
+    }
+
+    public int getLSMIndexMergeThreshold() {
+        return accessor.getProperty(STORAGE_LSM_MERGETHRESHOLD_KEY, STORAGE_LSM_MERGETHRESHOLD_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+    public double getBloomFilterFalsePositiveRate() {
+        return accessor.getProperty(STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE_KEY,
+                STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE_DEFAULT, PropertyInterpreters.getDoublePropertyInterpreter());
+    }
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixTransactionProperties.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixTransactionProperties.java
new file mode 100644
index 0000000..d97e53b
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixTransactionProperties.java
@@ -0,0 +1,72 @@
+package edu.uci.ics.asterix.common.config;
+
+public class AsterixTransactionProperties extends AbstractAsterixProperties {
+    private static final String TXN_LOG_BUFFER_NUMPAGES_KEY = "txn.log.buffer.numpages";
+    private static int TXN_LOG_BUFFER_NUMPAGES_DEFAULT = 8;
+
+    private static final String TXN_LOG_BUFFER_PAGESIZE_KEY = "txn.log.buffer.pagesize";
+    private static final int TXN_LOG_BUFFER_PAGESIZE_DEFAULT = (128 << 10); // 128KB
+
+    private static final String TXN_LOG_PARTITIONSIZE_KEY = "txn.log.partitionsize";
+    private static final long TXN_LOG_PARTITIONSIZE_DEFAULT = (2L << 30); // 2GB
+
+    private static final String TXN_LOG_GROUPCOMMITINTERVAL_KEY = "txn.log.groupcommitinterval";
+    private static int TXN_LOG_GROUPCOMMITINTERVAL_DEFAULT = 200; // 200ms
+
+    private static final String TXN_LOG_CHECKPOINT_LSNTHRESHOLD_KEY = "txn.log.checkpoint.lsnthreshold";
+    private static final int TXN_LOG_CHECKPOINT_LSNTHRESHOLD_DEFAULT = (64 << 20); // 64M
+
+    private static final String TXN_LOG_CHECKPOINT_POLLFREQUENCY_KEY = "txn.log.checkpoint.pollfrequency";
+    private static int TXN_LOG_CHECKPOINT_POLLFREQUENCY_DEFAULT = 120; // 120s
+
+    private static final String TXN_LOCK_ESCALATIONTHRESHOLD_KEY = "txn.lock.escalationthreshold";
+    private static int TXN_LOCK_ESCALATIONTHRESHOLD_DEFAULT = 1000;
+
+    private static final String TXN_LOCK_SHRINKTIMER_KEY = "txn.lock.shrinktimer";
+    private static int TXN_LOCK_SHRINKTIMER_DEFAULT = 120000; // 2m
+
+    public AsterixTransactionProperties(AsterixPropertiesAccessor accessor) {
+        super(accessor);
+    }
+
+    public int getLogBufferNumPages() {
+        return accessor.getProperty(TXN_LOG_BUFFER_NUMPAGES_KEY, TXN_LOG_BUFFER_NUMPAGES_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+    public int getLogBufferPageSize() {
+        return accessor.getProperty(TXN_LOG_BUFFER_PAGESIZE_KEY, TXN_LOG_BUFFER_PAGESIZE_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+    public long getLogPartitionSize() {
+        return accessor.getProperty(TXN_LOG_PARTITIONSIZE_KEY, TXN_LOG_PARTITIONSIZE_DEFAULT,
+                PropertyInterpreters.getLongPropertyInterpreter());
+    }
+
+    public int getGroupCommitInterval() {
+        return accessor.getProperty(TXN_LOG_GROUPCOMMITINTERVAL_KEY, TXN_LOG_GROUPCOMMITINTERVAL_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+    public int getCheckpointLSNThreshold() {
+        return accessor.getProperty(TXN_LOG_CHECKPOINT_LSNTHRESHOLD_KEY, TXN_LOG_CHECKPOINT_LSNTHRESHOLD_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+    public int getCheckpointPollFrequency() {
+        return accessor.getProperty(TXN_LOG_CHECKPOINT_POLLFREQUENCY_KEY, TXN_LOG_CHECKPOINT_POLLFREQUENCY_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+    public int getEntityToDatasetLockEscalationThreshold() {
+        return accessor.getProperty(TXN_LOCK_ESCALATIONTHRESHOLD_KEY, TXN_LOCK_ESCALATIONTHRESHOLD_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+    public int getLockManagerShrinkTimer() {
+        return accessor.getProperty(TXN_LOCK_SHRINKTIMER_KEY, TXN_LOCK_SHRINKTIMER_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/GlobalConfig.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/GlobalConfig.java
index 1183b65..3948e69 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/GlobalConfig.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/GlobalConfig.java
@@ -9,11 +9,7 @@
 
     public static final Logger ASTERIX_LOGGER = Logger.getLogger(ASTERIX_LOGGER_NAME);
 
-    public static final String ASTERIX_LOGFILE_PATTERN = "%t/asterix.log";
-
-    public static final String DEFAULT_CONFIG_FILE_NAME = "test.properties";
-
-    public static final String TEST_CONFIG_FILE_NAME = "src/main/resources/test.properties";
+    public static final String DEFAULT_CONFIG_FILE_NAME = "asterix-configuration.xml";
 
     public static final String CONFIG_FILE_PROPERTY = "AsterixConfigFileName";
 
@@ -21,12 +17,6 @@
 
     public static final String JSON_API_SERVER_PORT_PROPERTY = "AsterixJSONAPIServerPort";
 
-    public static final String BUFFER_CACHE_PAGE_SIZE_PROPERTY = "BufferCachePageSize";
-
-    public static final String BUFFER_CACHE_NUM_PAGES_PROPERTY = "BufferCacheNumPages";
-
-    public static final int DEFAULT_BUFFER_CACHE_NUM_PAGES = 4096;
-
     public static final int DEFAULT_FRAME_SIZE = 32768;
 
     public static final String FRAME_SIZE_PROPERTY = "FrameSize";
@@ -35,10 +25,6 @@
 
     public static int DEFAULT_INPUT_DATA_COLUMN = 0;
 
-    public static int DEFAULT_INDEX_MEM_PAGE_SIZE = 32768;
-
-    public static int DEFAULT_INDEX_MEM_NUM_PAGES = 1000;
-
     public static int getFrameSize() {
         int frameSize = GlobalConfig.DEFAULT_FRAME_SIZE;
         String frameSizeStr = System.getProperty(GlobalConfig.FRAME_SIZE_PROPERTY);
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/IAsterixPropertiesProvider.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/IAsterixPropertiesProvider.java
new file mode 100644
index 0000000..e42c827
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/IAsterixPropertiesProvider.java
@@ -0,0 +1,14 @@
+package edu.uci.ics.asterix.common.config;
+
+
+public interface IAsterixPropertiesProvider {
+    public AsterixStorageProperties getStorageProperties();
+
+    public AsterixTransactionProperties getTransactionProperties();
+
+    public AsterixCompilerProperties getCompilerProperties();
+
+    public AsterixMetadataProperties getMetadataProperties();
+
+    public AsterixExternalProperties getExternalProperties();
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/IPropertyInterpreter.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/IPropertyInterpreter.java
new file mode 100644
index 0000000..2cabbe5
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/IPropertyInterpreter.java
@@ -0,0 +1,7 @@
+package edu.uci.ics.asterix.common.config;
+
+import edu.uci.ics.asterix.common.configuration.Property;
+
+public interface IPropertyInterpreter<T> {
+    public T interpret(Property p) throws IllegalArgumentException;
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/PropertyInterpreters.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/PropertyInterpreters.java
new file mode 100644
index 0000000..9ff5dee
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/PropertyInterpreters.java
@@ -0,0 +1,71 @@
+package edu.uci.ics.asterix.common.config;
+
+import java.util.logging.Level;
+
+import edu.uci.ics.asterix.common.configuration.Property;
+
+public class PropertyInterpreters {
+
+    public static IPropertyInterpreter<Integer> getIntegerPropertyInterpreter() {
+        return new IPropertyInterpreter<Integer>() {
+
+            @Override
+            public Integer interpret(Property p) throws IllegalArgumentException {
+                try {
+                    return Integer.parseInt(p.getValue());
+                } catch (NumberFormatException e) {
+                    throw new IllegalArgumentException(e);
+                }
+            }
+        };
+    }
+
+    public static IPropertyInterpreter<Long> getLongPropertyInterpreter() {
+        return new IPropertyInterpreter<Long>() {
+
+            @Override
+            public Long interpret(Property p) throws IllegalArgumentException {
+                try {
+                    return Long.parseLong(p.getValue());
+                } catch (NumberFormatException e) {
+                    throw new IllegalArgumentException(e);
+                }
+            }
+        };
+    }
+
+    public static IPropertyInterpreter<Level> getLevelPropertyInterpreter() {
+        return new IPropertyInterpreter<Level>() {
+
+            @Override
+            public Level interpret(Property p) throws IllegalArgumentException {
+                return Level.parse(p.getValue());
+            }
+        };
+    }
+
+    public static IPropertyInterpreter<String> getStringPropertyInterpreter() {
+        return new IPropertyInterpreter<String>() {
+
+            @Override
+            public String interpret(Property p) throws IllegalArgumentException {
+                return p.getValue();
+            }
+        };
+    }
+
+    public static IPropertyInterpreter<Double> getDoublePropertyInterpreter() {
+        return new IPropertyInterpreter<Double>() {
+
+            @Override
+            public Double interpret(Property p) throws IllegalArgumentException {
+                try {
+                    return Double.parseDouble(p.getValue());
+                } catch (NumberFormatException e) {
+                    throw new IllegalArgumentException(e);
+                }
+            }
+        };
+    }
+
+}
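
New property groups follow the same pattern as the classes introduced above: extend AbstractAsterixProperties and delegate each getter to accessor.getProperty with a key, a default, and one of these interpreter factories. A hedged sketch with a hypothetical key and default value (neither is defined by this patch):

import edu.uci.ics.asterix.common.config.AbstractAsterixProperties;
import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
import edu.uci.ics.asterix.common.config.PropertyInterpreters;

// Illustration only: "feed.buffer.numframes" and its default are hypothetical values chosen
// to show the key/default/interpreter pattern; they are not part of this change.
public class AsterixExampleProperties extends AbstractAsterixProperties {
    private static final String FEED_BUFFER_NUMFRAMES_KEY = "feed.buffer.numframes";
    private static final int FEED_BUFFER_NUMFRAMES_DEFAULT = 32;

    public AsterixExampleProperties(AsterixPropertiesAccessor accessor) {
        super(accessor);
    }

    public int getFeedBufferNumFrames() {
        // Falls back to the default when the key is absent from asterix-configuration.xml;
        // a malformed value is logged by the accessor and rethrown as IllegalArgumentException.
        return accessor.getProperty(FEED_BUFFER_NUMFRAMES_KEY, FEED_BUFFER_NUMFRAMES_DEFAULT,
                PropertyInterpreters.getIntegerPropertyInterpreter());
    }
}
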
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java
index 5070938..03e84ef 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java
@@ -1,9 +1,16 @@
 package edu.uci.ics.asterix.common.context;
 
 import java.io.IOException;
-import java.util.logging.Level;
+import java.util.logging.Logger;
 
-import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
+import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
+import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
+import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
+import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
+import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
 import edu.uci.ics.asterix.transaction.management.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
 import edu.uci.ics.asterix.transaction.management.ioopcallbacks.LSMInvertedIndexIOOperationCallbackFactory;
@@ -39,12 +46,15 @@
 import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
 import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactoryProvider;
 
-public class AsterixAppRuntimeContext {
-    private static final int DEFAULT_BUFFER_CACHE_PAGE_SIZE = 32768;
-    private static final int DEFAULT_LIFECYCLEMANAGER_MEMORY_BUDGET = 1024 * 1024 * 1024; // 1GB
-    private static final int DEFAULT_MAX_OPEN_FILES = Integer.MAX_VALUE;
+public class AsterixAppRuntimeContext implements IAsterixPropertiesProvider {
     private final INCApplicationContext ncApplicationContext;
 
+    private AsterixCompilerProperties compilerProperties;
+    private AsterixExternalProperties externalProperties;
+    private AsterixMetadataProperties metadataProperties;
+    private AsterixStorageProperties storageProperties;
+    private AsterixTransactionProperties txnProperties;
+
     private IIndexLifecycleManager indexLifecycleManager;
     private IFileMapManager fileMapManager;
     private IBufferCache bufferCache;
@@ -64,24 +74,29 @@
         this.ncApplicationContext = ncApplicationContext;
     }
 
-    public void initialize() throws IOException, ACIDException {
-        int pageSize = getBufferCachePageSize();
-        int numPages = getBufferCacheNumPages();
+    public void initialize() throws IOException, ACIDException, AsterixException {
+        AsterixPropertiesAccessor propertiesAccessor = new AsterixPropertiesAccessor();
+        compilerProperties = new AsterixCompilerProperties(propertiesAccessor);
+        externalProperties = new AsterixExternalProperties(propertiesAccessor);
+        metadataProperties = new AsterixMetadataProperties(propertiesAccessor);
+        storageProperties = new AsterixStorageProperties(propertiesAccessor);
+        txnProperties = new AsterixTransactionProperties(propertiesAccessor);
+
+        Logger.getLogger("edu.uci.ics").setLevel(externalProperties.getLogLevel());
 
         fileMapManager = new AsterixFileMapManager();
         ICacheMemoryAllocator allocator = new HeapBufferAllocator();
         IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
-        ioManager = ncApplicationContext.getRootContext().getIOManager();
-        indexLifecycleManager = new IndexLifecycleManager(DEFAULT_LIFECYCLEMANAGER_MEMORY_BUDGET);
-        IAsterixAppRuntimeContextProvider asterixAppRuntimeContextProvider = new AsterixAppRuntimeContextProviderForRecovery(
-                this);
-        txnSubsystem = new TransactionSubsystem(ncApplicationContext.getNodeId(), asterixAppRuntimeContextProvider);
         IPageCleanerPolicy pcp = new DelayPageCleanerPolicy(600000);
-        bufferCache = new BufferCache(ioManager, allocator, prs, pcp, fileMapManager, pageSize, numPages,
-                DEFAULT_MAX_OPEN_FILES);
+        ioManager = ncApplicationContext.getRootContext().getIOManager();
+        bufferCache = new BufferCache(ioManager, allocator, prs, pcp, fileMapManager,
+                storageProperties.getBufferCachePageSize(), storageProperties.getBufferCacheNumPages(),
+                storageProperties.getBufferCacheMaxOpenFiles());
+
+        indexLifecycleManager = new IndexLifecycleManager(storageProperties.getMemoryComponentGlobalBudget());
 
         lsmIOScheduler = SynchronousScheduler.INSTANCE;
-        mergePolicy = new ConstantMergePolicy(3, this);
+        mergePolicy = new ConstantMergePolicy(storageProperties.getLSMIndexMergeThreshold(), this);
         lsmBTreeOpTrackerFactory = new IndexOperationTrackerFactory(LSMBTreeIOOperationCallbackFactory.INSTANCE);
         lsmRTreeOpTrackerFactory = new IndexOperationTrackerFactory(LSMRTreeIOOperationCallbackFactory.INSTANCE);
         lsmInvertedIndexOpTrackerFactory = new IndexOperationTrackerFactory(
@@ -92,6 +107,10 @@
         localResourceRepository = (PersistentLocalResourceRepository) persistentLocalResourceRepositoryFactory
                 .createRepository();
         resourceIdFactory = (new ResourceIdFactoryProvider(localResourceRepository)).createResourceIdFactory();
+
+        IAsterixAppRuntimeContextProvider asterixAppRuntimeContextProvider = new AsterixAppRuntimeContextProviderForRecovery(
+                this);
+        txnSubsystem = new TransactionSubsystem(ncApplicationContext.getNodeId(), asterixAppRuntimeContextProvider);
         isShuttingdown = false;
     }
 
@@ -103,49 +122,6 @@
         this.isShuttingdown = isShuttingdown;
     }
 
-    private int getBufferCachePageSize() {
-        int pageSize = DEFAULT_BUFFER_CACHE_PAGE_SIZE;
-        String pageSizeStr = System.getProperty(GlobalConfig.BUFFER_CACHE_PAGE_SIZE_PROPERTY, null);
-        if (pageSizeStr != null) {
-            try {
-                pageSize = Integer.parseInt(pageSizeStr);
-            } catch (NumberFormatException nfe) {
-                if (GlobalConfig.ASTERIX_LOGGER.isLoggable(Level.WARNING)) {
-                    GlobalConfig.ASTERIX_LOGGER.warning("Wrong buffer cache page size argument. "
-                            + "Using default value: " + pageSize);
-                }
-            }
-        }
-
-        if (GlobalConfig.ASTERIX_LOGGER.isLoggable(Level.INFO)) {
-            GlobalConfig.ASTERIX_LOGGER.info("Buffer cache page size: " + pageSize);
-        }
-
-        return pageSize;
-    }
-
-    private int getBufferCacheNumPages() {
-        int numPages = GlobalConfig.DEFAULT_BUFFER_CACHE_NUM_PAGES;
-        String numPagesStr = System.getProperty(GlobalConfig.BUFFER_CACHE_NUM_PAGES_PROPERTY, null);
-        if (numPagesStr != null) {
-            try {
-                numPages = Integer.parseInt(numPagesStr);
-            } catch (NumberFormatException nfe) {
-                if (GlobalConfig.ASTERIX_LOGGER.isLoggable(Level.WARNING)) {
-                    GlobalConfig.ASTERIX_LOGGER.warning("Wrong buffer cache size argument. " + "Using default value: "
-                            + numPages);
-                }
-                return numPages;
-            }
-        }
-
-        if (GlobalConfig.ASTERIX_LOGGER.isLoggable(Level.INFO)) {
-            GlobalConfig.ASTERIX_LOGGER.info("Buffer cache size (number of pages): " + numPages);
-        }
-
-        return numPages;
-    }
-
     public void deinitialize() throws HyracksDataException {
         bufferCache.close();
         for (IIndex index : indexLifecycleManager.getOpenIndexes()) {
@@ -216,4 +192,29 @@
     public IIOManager getIOManager() {
         return ioManager;
     }
+
+    @Override
+    public AsterixStorageProperties getStorageProperties() {
+        return storageProperties;
+    }
+
+    @Override
+    public AsterixTransactionProperties getTransactionProperties() {
+        return txnProperties;
+    }
+
+    @Override
+    public AsterixCompilerProperties getCompilerProperties() {
+        return compilerProperties;
+    }
+
+    @Override
+    public AsterixMetadataProperties getMetadataProperties() {
+        return metadataProperties;
+    }
+
+    @Override
+    public AsterixExternalProperties getExternalProperties() {
+        return externalProperties;
+    }
 }
\ No newline at end of file
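
The two System.getProperty-based getters removed above are replaced by typed accessors on AsterixStorageProperties (getBufferCachePageSize(), getBufferCacheNumPages(), getBufferCacheMaxOpenFiles(), getMemoryComponentGlobalBudget(), getLSMIndexMergeThreshold()), all built from the shared propertiesAccessor. A minimal sketch of such a wrapper follows; the getter names come from the hunks above, but the property keys, defaults, and accessor API are illustrative assumptions, not part of this patch.

    // Sketch only: java.util.Properties stands in for the real accessor, and the
    // key names/defaults are assumptions; only the getter names match the patch.
    public class StoragePropertiesSketch {
        private static final String PAGE_SIZE_KEY = "storage.buffercache.pagesize"; // assumed key
        private static final int PAGE_SIZE_DEFAULT = 32768;                         // assumed default
        private static final String NUM_PAGES_KEY = "storage.buffercache.numpages"; // assumed key
        private static final int NUM_PAGES_DEFAULT = 1024;                          // assumed default

        private final java.util.Properties accessor;

        public StoragePropertiesSketch(java.util.Properties accessor) {
            this.accessor = accessor;
        }

        public int getBufferCachePageSize() {
            // Centralized default handling replaces the per-call System.getProperty parsing.
            return Integer.parseInt(accessor.getProperty(PAGE_SIZE_KEY, Integer.toString(PAGE_SIZE_DEFAULT)));
        }

        public int getBufferCacheNumPages() {
            return Integer.parseInt(accessor.getProperty(NUM_PAGES_KEY, Integer.toString(NUM_PAGES_DEFAULT)));
        }
    }
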
diff --git a/asterix-common/src/main/resources/schema/asterix-conf.xsd b/asterix-common/src/main/resources/schema/asterix-conf.xsd
new file mode 100644
index 0000000..f53fb4b
--- /dev/null
+++ b/asterix-common/src/main/resources/schema/asterix-conf.xsd
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="ISO-8859-1" ?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+	xmlns:mg="asterixconf" targetNamespace="asterixconf"
+	elementFormDefault="qualified">
+
+	<!-- definition of simple types -->
+
+        
+	<xs:element name="metadataNode" type="xs:string" />
+	<xs:element name="storeDirs" type="xs:string" />
+	<xs:element name="ncId" type="xs:string" />
+	<xs:element name="name" type="xs:string" />
+	<xs:element name="value" type="xs:string" />
+	<xs:element name="description" type="xs:string" />
+	
+	<!-- definition of complex elements -->
+	<xs:element name="store">
+		<xs:complexType>
+			<xs:sequence>
+				<xs:element ref="mg:ncId" />
+				<xs:element ref="mg:storeDirs" />
+			</xs:sequence>
+		</xs:complexType>
+	</xs:element>
+
+	<xs:element name="property">
+		<xs:complexType>
+			<xs:sequence>
+				<xs:element ref="mg:name" />
+				<xs:element ref="mg:value" />
+				<xs:element ref="mg:description" />
+			</xs:sequence>
+		</xs:complexType>
+	</xs:element>
+
+
+	<xs:element name="asterixConfiguration">
+		<xs:complexType>
+			<xs:sequence>
+				<xs:element ref="mg:metadataNode" minOccurs="0"/>
+				<xs:element ref="mg:store" maxOccurs="unbounded" />
+				<xs:element ref="mg:property" minOccurs="0" maxOccurs="unbounded" />
+			</xs:sequence>
+		</xs:complexType>
+	</xs:element>
+
+</xs:schema>     
diff --git a/asterix-common/src/main/resources/schema/jaxb-bindings.xjb b/asterix-common/src/main/resources/schema/jaxb-bindings.xjb
new file mode 100644
index 0000000..b5982e0
--- /dev/null
+++ b/asterix-common/src/main/resources/schema/jaxb-bindings.xjb
@@ -0,0 +1,9 @@
+<jxb:bindings version="1.0"
+xmlns:jxb="http://java.sun.com/xml/ns/jaxb"
+xmlns:xs="http://www.w3.org/2001/XMLSchema">
+
+<jxb:globalBindings>
+  <jxb:serializable uid="1"/>
+</jxb:globalBindings>
+
+</jxb:bindings>
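
The new schema and JAXB bindings imply that the asterix configuration file is unmarshalled into generated classes, mirroring the JAXB pattern EventUtil.getCluster() already uses for cluster.xml. A minimal sketch under that assumption follows; the class names (AsterixConfiguration, Store, Property) and their getters are what JAXB's default binding would derive from asterix-conf.xsd, but they are not confirmed by this patch, and the file name is a placeholder.

    import java.io.File;
    import java.util.HashMap;
    import java.util.Map;
    import javax.xml.bind.JAXBContext;
    import javax.xml.bind.Unmarshaller;

    // Hypothetical loader for the new configuration schema; the generated class
    // and getter names are assumptions based on JAXB's default bindings.
    public class AsterixConfLoaderSketch {
        public static void main(String[] args) throws Exception {
            JAXBContext ctx = JAXBContext.newInstance(AsterixConfiguration.class);
            Unmarshaller unmarshaller = ctx.createUnmarshaller();
            AsterixConfiguration conf = (AsterixConfiguration) unmarshaller
                    .unmarshal(new File("asterix-configuration.xml")); // placeholder path

            System.out.println("metadata node: " + conf.getMetadataNode());
            for (Store store : conf.getStore()) {
                System.out.println(store.getNcId() + " -> " + store.getStoreDirs());
            }
            Map<String, String> props = new HashMap<String, String>();
            for (Property property : conf.getProperty()) {
                props.put(property.getName(), property.getValue());
            }
            System.out.println(props);
        }
    }
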
diff --git a/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java b/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
index 7e9b08f..8449971 100644
--- a/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
+++ b/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
@@ -31,398 +31,369 @@
 import edu.uci.ics.asterix.testframework.context.TestCaseContext;
 import edu.uci.ics.asterix.testframework.context.TestFileContext;
 import edu.uci.ics.asterix.testframework.xml.TestCase.CompilationUnit;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
 
 public class TestsUtils {
 
-	private static final String EXTENSION_AQL_RESULT = "adm";
-	private static final Logger LOGGER = Logger.getLogger(TestsUtils.class
-			.getName());
-	private static Method managixExecuteMethod = null;
+    private static final String EXTENSION_AQL_RESULT = "adm";
+    private static final Logger LOGGER = Logger.getLogger(TestsUtils.class.getName());
+    private static Method managixExecuteMethod = null;
 
-	/**
-	 * Probably does not work well with symlinks.
-	 */
-	public static boolean deleteRec(File path) {
-		if (path.isDirectory()) {
-			for (File f : path.listFiles()) {
-				if (!deleteRec(f)) {
-					return false;
-				}
-			}
-		}
-		return path.delete();
-	}
+    /**
+     * Probably does not work well with symlinks.
+     */
+    public static boolean deleteRec(File path) {
+        if (path.isDirectory()) {
+            for (File f : path.listFiles()) {
+                if (!deleteRec(f)) {
+                    return false;
+                }
+            }
+        }
+        return path.delete();
+    }
 
-	public static void runScriptAndCompareWithResult(File scriptFile,
-			PrintWriter print, File expectedFile, File actualFile)
-			throws Exception {
-		BufferedReader readerExpected = new BufferedReader(
-				new InputStreamReader(new FileInputStream(expectedFile),
-						"UTF-8"));
-		BufferedReader readerActual = new BufferedReader(new InputStreamReader(
-				new FileInputStream(actualFile), "UTF-8"));
-		String lineExpected, lineActual;
-		int num = 1;
-		try {
-			while ((lineExpected = readerExpected.readLine()) != null) {
-				lineActual = readerActual.readLine();
-				// Assert.assertEquals(lineExpected, lineActual);
-				if (lineActual == null) {
-					if (lineExpected.isEmpty()) {
-						continue;
-					}
-					throw new Exception("Result for " + scriptFile
-							+ " changed at line " + num + ":\n< "
-							+ lineExpected + "\n> ");
-				}
+    public static void runScriptAndCompareWithResult(File scriptFile, PrintWriter print, File expectedFile,
+            File actualFile) throws Exception {
+        BufferedReader readerExpected = new BufferedReader(new InputStreamReader(new FileInputStream(expectedFile),
+                "UTF-8"));
+        BufferedReader readerActual = new BufferedReader(
+                new InputStreamReader(new FileInputStream(actualFile), "UTF-8"));
+        String lineExpected, lineActual;
+        int num = 1;
+        try {
+            while ((lineExpected = readerExpected.readLine()) != null) {
+                lineActual = readerActual.readLine();
+                // Assert.assertEquals(lineExpected, lineActual);
+                if (lineActual == null) {
+                    if (lineExpected.isEmpty()) {
+                        continue;
+                    }
+                    throw new Exception("Result for " + scriptFile + " changed at line " + num + ":\n< " + lineExpected
+                            + "\n> ");
+                }
 
-				if (!equalStrings(lineExpected.split("Timestamp")[0],
-						lineActual.split("Timestamp")[0])) {
-					fail("Result for " + scriptFile + " changed at line " + num
-							+ ":\n< " + lineExpected + "\n> " + lineActual);
-				}
+                if (!equalStrings(lineExpected.split("Timestamp")[0], lineActual.split("Timestamp")[0])) {
+                    fail("Result for " + scriptFile + " changed at line " + num + ":\n< " + lineExpected + "\n> "
+                            + lineActual);
+                }
 
-				++num;
-			}
-			lineActual = readerActual.readLine();
-			// Assert.assertEquals(null, lineActual);
-			if (lineActual != null) {
-				throw new Exception("Result for " + scriptFile
-						+ " changed at line " + num + ":\n< \n> " + lineActual);
-			}
-			// actualFile.delete();
-		} finally {
-			readerExpected.close();
-			readerActual.close();
-		}
+                ++num;
+            }
+            lineActual = readerActual.readLine();
+            // Assert.assertEquals(null, lineActual);
+            if (lineActual != null) {
+                throw new Exception("Result for " + scriptFile + " changed at line " + num + ":\n< \n> " + lineActual);
+            }
+            // actualFile.delete();
+        } finally {
+            readerExpected.close();
+            readerActual.close();
+        }
 
-	}
+    }
 
-	private static boolean equalStrings(String s1, String s2) {
-		String[] rowsOne = s1.split("\n");
-		String[] rowsTwo = s2.split("\n");
+    private static boolean equalStrings(String s1, String s2) {
+        String[] rowsOne = s1.split("\n");
+        String[] rowsTwo = s2.split("\n");
 
-		for (int i = 0; i < rowsOne.length; i++) {
-			String row1 = rowsOne[i];
-			String row2 = rowsTwo[i];
+        for (int i = 0; i < rowsOne.length; i++) {
+            String row1 = rowsOne[i];
+            String row2 = rowsTwo[i];
 
-			if (row1.equals(row2))
-				continue;
+            if (row1.equals(row2))
+                continue;
 
-			String[] fields1 = row1.split(" ");
-			String[] fields2 = row2.split(" ");
+            String[] fields1 = row1.split(" ");
+            String[] fields2 = row2.split(" ");
 
-			for (int j = 0; j < fields1.length; j++) {
-				if (fields1[j].equals(fields2[j])) {
-					continue;
-				} else if (fields1[j].indexOf('.') < 0) {
-					return false;
-				} else {
-					fields1[j] = fields1[j].split(",")[0];
-					fields2[j] = fields2[j].split(",")[0];
-					Double double1 = Double.parseDouble(fields1[j]);
-					Double double2 = Double.parseDouble(fields2[j]);
-					float float1 = (float) double1.doubleValue();
-					float float2 = (float) double2.doubleValue();
+            for (int j = 0; j < fields1.length; j++) {
+                if (fields1[j].equals(fields2[j])) {
+                    continue;
+                } else if (fields1[j].indexOf('.') < 0) {
+                    return false;
+                } else {
+                    fields1[j] = fields1[j].split(",")[0];
+                    fields2[j] = fields2[j].split(",")[0];
+                    Double double1 = Double.parseDouble(fields1[j]);
+                    Double double2 = Double.parseDouble(fields2[j]);
+                    float float1 = (float) double1.doubleValue();
+                    float float2 = (float) double2.doubleValue();
 
-					if (Math.abs(float1 - float2) == 0)
-						continue;
-					else {
-						return false;
-					}
-				}
-			}
-		}
-		return true;
-	}
+                    if (Math.abs(float1 - float2) == 0)
+                        continue;
+                    else {
+                        return false;
+                    }
+                }
+            }
+        }
+        return true;
+    }
 
-	public static String aqlExtToResExt(String fname) {
-		int dot = fname.lastIndexOf('.');
-		return fname.substring(0, dot + 1) + EXTENSION_AQL_RESULT;
-	}
+    public static String aqlExtToResExt(String fname) {
+        int dot = fname.lastIndexOf('.');
+        return fname.substring(0, dot + 1) + EXTENSION_AQL_RESULT;
+    }
 
-	public static void writeResultsToFile(File actualFile, JSONObject result)
-			throws IOException, JSONException {
-		BufferedWriter writer = new BufferedWriter(new FileWriter(actualFile));
-		Results res = new Results(result);
-		for (String line : res) {
-			writer.write(line);
-			writer.newLine();
-		}
-		writer.close();
-	}
+    public static void writeResultsToFile(File actualFile, JSONObject result) throws IOException, JSONException {
+        BufferedWriter writer = new BufferedWriter(new FileWriter(actualFile));
+        Results res = new Results(result);
+        for (String line : res) {
+            writer.write(line);
+            writer.newLine();
+        }
+        writer.close();
+    }
 
-	public static class Results implements Iterable<String> {
-		private final JSONArray chunks;
+    public static class Results implements Iterable<String> {
+        private final JSONArray chunks;
 
-		public Results(JSONObject result) throws JSONException {
-			chunks = result.getJSONArray("results");
-		}
+        public Results(JSONObject result) throws JSONException {
+            chunks = result.getJSONArray("results");
+        }
 
-		public Iterator<String> iterator() {
-			return new ResultIterator(chunks);
-		}
-	}
+        public Iterator<String> iterator() {
+            return new ResultIterator(chunks);
+        }
+    }
 
-	public static class ResultIterator implements Iterator<String> {
-		private final JSONArray chunks;
+    public static class ResultIterator implements Iterator<String> {
+        private final JSONArray chunks;
 
-		private int chunkCounter = 0;
-		private int recordCounter = 0;
+        private int chunkCounter = 0;
+        private int recordCounter = 0;
 
-		public ResultIterator(JSONArray chunks) {
-			this.chunks = chunks;
-		}
+        public ResultIterator(JSONArray chunks) {
+            this.chunks = chunks;
+        }
 
-		@Override
-		public boolean hasNext() {
-			JSONArray resultArray;
-			try {
-				resultArray = chunks.getJSONArray(chunkCounter);
-				if (resultArray.getString(recordCounter) != null) {
-					return true;
-				}
-			} catch (JSONException e) {
-				return false;
-			}
-			return false;
-		}
+        @Override
+        public boolean hasNext() {
+            JSONArray resultArray;
+            try {
+                resultArray = chunks.getJSONArray(chunkCounter);
+                if (resultArray.getString(recordCounter) != null) {
+                    return true;
+                }
+            } catch (JSONException e) {
+                return false;
+            }
+            return false;
+        }
 
-		@Override
-		public String next() throws NoSuchElementException {
-			JSONArray resultArray;
-			String item = "";
+        @Override
+        public String next() throws NoSuchElementException {
+            JSONArray resultArray;
+            String item = "";
 
-			try {
-				resultArray = chunks.getJSONArray(chunkCounter);
-				item = resultArray.getString(recordCounter);
-				if (item == null) {
-					throw new NoSuchElementException();
-				}
-				item = item.trim();
+            try {
+                resultArray = chunks.getJSONArray(chunkCounter);
+                item = resultArray.getString(recordCounter);
+                if (item == null) {
+                    throw new NoSuchElementException();
+                }
+                item = item.trim();
 
-				recordCounter++;
-				if (recordCounter >= resultArray.length()) {
-					chunkCounter++;
-					recordCounter = 0;
-				}
-			} catch (JSONException e) {
-				throw new NoSuchElementException(e.getMessage());
-			}
-			return item;
-		}
+                recordCounter++;
+                if (recordCounter >= resultArray.length()) {
+                    chunkCounter++;
+                    recordCounter = 0;
+                }
+            } catch (JSONException e) {
+                throw new NoSuchElementException(e.getMessage());
+            }
+            return item;
+        }
 
-		@Override
-		public void remove() {
-			throw new NotImplementedException();
-		}
-	}
+        @Override
+        public void remove() {
+            throw new UnsupportedOperationException();
+        }
+    }
 
-	// Executes Query and returns results as JSONArray
-	public static JSONObject executeQuery(String str) throws Exception {
+    // Executes a query via the REST API and returns the results as a JSONObject.
+    public static JSONObject executeQuery(String str) throws Exception {
 
-		final String url = "http://localhost:19101/query";
+        final String url = "http://localhost:19101/query";
 
-		// Create an instance of HttpClient.
-		HttpClient client = new HttpClient();
+        // Create an instance of HttpClient.
+        HttpClient client = new HttpClient();
 
-		// Create a method instance.
-		GetMethod method = new GetMethod(url);
+        // Create a method instance.
+        GetMethod method = new GetMethod(url);
 
-		method.setQueryString(new NameValuePair[] { new NameValuePair("query",
-				str) });
+        method.setQueryString(new NameValuePair[] { new NameValuePair("query", str) });
 
-		// Provide custom retry handler is necessary
-		method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER,
-				new DefaultHttpMethodRetryHandler(3, false));
+        // Provide custom retry handler is necessary
+        method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
 
-		JSONObject result = null;
+        JSONObject result = null;
 
-		try {
-			// Execute the method.
-			int statusCode = client.executeMethod(method);
+        try {
+            // Execute the method.
+            int statusCode = client.executeMethod(method);
 
-			// Check if the method was executed successfully.
-			if (statusCode != HttpStatus.SC_OK) {
-				System.err.println("Method failed: " + method.getStatusLine());
-			}
+            // Check if the method was executed successfully.
+            if (statusCode != HttpStatus.SC_OK) {
+                System.err.println("Method failed: " + method.getStatusLine());
+            }
 
-			// Read the response body as String.
-			String responseBody = method.getResponseBodyAsString();
+            // Read the response body as String.
+            String responseBody = method.getResponseBodyAsString();
 
-			result = new JSONObject(responseBody);
-		} catch (Exception e) {
-			System.out.println(e.getMessage());
-			e.printStackTrace();
-		}
-		return result;
-	}
+            result = new JSONObject(responseBody);
+        } catch (Exception e) {
+            System.out.println(e.getMessage());
+            e.printStackTrace();
+        }
+        return result;
+    }
 
-	// To execute Update statements
-	// Insert and Delete statements are executed here
-	public static void executeUpdate(String str) throws Exception {
-		final String url = "http://localhost:19101/update";
+    // To execute Update statements
+    // Insert and Delete statements are executed here
+    public static void executeUpdate(String str) throws Exception {
+        final String url = "http://localhost:19101/update";
 
-		// Create an instance of HttpClient.
-		HttpClient client = new HttpClient();
+        // Create an instance of HttpClient.
+        HttpClient client = new HttpClient();
 
-		// Create a method instance.
-		GetMethod method = new GetMethod(url);
+        // Create a method instance.
+        GetMethod method = new GetMethod(url);
 
-		method.setQueryString(new NameValuePair[] { new NameValuePair(
-				"statements", str) });
+        method.setQueryString(new NameValuePair[] { new NameValuePair("statements", str) });
 
-		// Provide custom retry handler is necessary
-		method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER,
-				new DefaultHttpMethodRetryHandler(3, false));
+        // Provide custom retry handler is necessary
+        method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
 
-		// Execute the method.
-		int statusCode = client.executeMethod(method);
+        // Execute the method.
+        int statusCode = client.executeMethod(method);
 
-		// Check if the method was executed successfully.
-		if (statusCode != HttpStatus.SC_OK) {
-			System.err.println("Method failed: " + method.getStatusLine());
-		}
-	}
+        // Check if the method was executed successfully.
+        if (statusCode != HttpStatus.SC_OK) {
+            System.err.println("Method failed: " + method.getStatusLine());
+        }
+    }
 
-	// To execute DDL and Update statements
-	// create type statement
-	// create dataset statement
-	// create index statement
-	// create dataverse statement
-	// create function statement
-	public static void executeDDL(String str) throws Exception {
-		final String url = "http://localhost:19101/ddl";
+    // To execute DDL and Update statements
+    // create type statement
+    // create dataset statement
+    // create index statement
+    // create dataverse statement
+    // create function statement
+    public static void executeDDL(String str) throws Exception {
+        final String url = "http://localhost:19101/ddl";
 
-		// Create an instance of HttpClient.
-		HttpClient client = new HttpClient();
+        // Create an instance of HttpClient.
+        HttpClient client = new HttpClient();
 
-		// Create a method instance.
-		GetMethod method = new GetMethod(url);
+        // Create a method instance.
+        GetMethod method = new GetMethod(url);
 
-		method.setQueryString(new NameValuePair[] { new NameValuePair("ddl",
-				str) });
+        method.setQueryString(new NameValuePair[] { new NameValuePair("ddl", str) });
 
-		// Provide custom retry handler is necessary
-		method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER,
-				new DefaultHttpMethodRetryHandler(3, false));
+        // Provide custom retry handler is necessary
+        method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
 
-		// Execute the method.
-		int statusCode = client.executeMethod(method);
+        // Execute the method.
+        int statusCode = client.executeMethod(method);
 
-		// Check if the method was executed successfully.
-		if (statusCode != HttpStatus.SC_OK) {
-			System.err.println("Method failed: " + method.getStatusLine());
-		}
-	}
+        // Check if the method was executed successfully.
+        if (statusCode != HttpStatus.SC_OK) {
+            System.err.println("Method failed: " + method.getStatusLine());
+        }
+    }
 
-	// Method that reads a DDL/Update/Query File
-	// and returns the contents as a string
-	// This string is later passed to REST API for execution.
-	public static String readTestFile(File testFile) throws Exception {
-		BufferedReader reader = new BufferedReader(new FileReader(testFile));
-		String line = null;
-		StringBuilder stringBuilder = new StringBuilder();
-		String ls = System.getProperty("line.separator");
+    // Method that reads a DDL/Update/Query File
+    // and returns the contents as a string
+    // This string is later passed to REST API for execution.
+    public static String readTestFile(File testFile) throws Exception {
+        BufferedReader reader = new BufferedReader(new FileReader(testFile));
+        String line = null;
+        StringBuilder stringBuilder = new StringBuilder();
+        String ls = System.getProperty("line.separator");
 
-		while ((line = reader.readLine()) != null) {
-			stringBuilder.append(line);
-			stringBuilder.append(ls);
-		}
+        while ((line = reader.readLine()) != null) {
+            stringBuilder.append(line);
+            stringBuilder.append(ls);
+        }
 
-		return stringBuilder.toString();
-	}
+        return stringBuilder.toString();
+    }
 
-	public static void executeManagixCommand(String command)
-			throws ClassNotFoundException, NoSuchMethodException,
-			SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
-		if (managixExecuteMethod == null) {
-			Class clazz = Class
-					.forName("edu.uci.ics.asterix.installer.test.AsterixInstallerIntegrationUtil");
-			managixExecuteMethod = clazz.getMethod("executeCommand",
-					String.class);
-		}
-		managixExecuteMethod.invoke(null, command);
-	}
+    public static void executeManagixCommand(String command) throws ClassNotFoundException, NoSuchMethodException,
+            SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
+        if (managixExecuteMethod == null) {
+            Class clazz = Class.forName("edu.uci.ics.asterix.installer.test.AsterixInstallerIntegrationUtil");
+            managixExecuteMethod = clazz.getMethod("executeCommand", String.class);
+        }
+        managixExecuteMethod.invoke(null, command);
+    }
 
-	public static void executeTest(String actualPath,
-			TestCaseContext testCaseCtx) throws Exception {
+    public static void executeTest(String actualPath, TestCaseContext testCaseCtx) throws Exception {
 
-		File testFile;
-		File expectedResultFile;
-		String statement;
-		List<TestFileContext> expectedResultFileCtxs;
-		List<TestFileContext> testFileCtxs;
+        File testFile;
+        File expectedResultFile;
+        String statement;
+        List<TestFileContext> expectedResultFileCtxs;
+        List<TestFileContext> testFileCtxs;
 
-		int queryCount = 0;
-		JSONObject result;
+        int queryCount = 0;
+        JSONObject result;
 
-		List<CompilationUnit> cUnits = testCaseCtx.getTestCase()
-				.getCompilationUnit();
-		for (CompilationUnit cUnit : cUnits) {
-			LOGGER.info("[TEST]: " + testCaseCtx.getTestCase().getFilePath()
-					+ "/" + cUnit.getName());
+        List<CompilationUnit> cUnits = testCaseCtx.getTestCase().getCompilationUnit();
+        for (CompilationUnit cUnit : cUnits) {
+            LOGGER.info("[TEST]: " + testCaseCtx.getTestCase().getFilePath() + "/" + cUnit.getName());
 
-			testFileCtxs = testCaseCtx.getTestFiles(cUnit);
-			expectedResultFileCtxs = testCaseCtx.getExpectedResultFiles(cUnit);
+            testFileCtxs = testCaseCtx.getTestFiles(cUnit);
+            expectedResultFileCtxs = testCaseCtx.getExpectedResultFiles(cUnit);
 
-			for (TestFileContext ctx : testFileCtxs) {
-				testFile = ctx.getFile();
-				statement = TestsUtils.readTestFile(testFile);
-				try {
-					switch (ctx.getType()) {
-					case "ddl":
-						TestsUtils.executeDDL(statement);
-						break;
-					case "update":
-						TestsUtils.executeUpdate(statement);
-						break;
-					case "query":
-						result = TestsUtils.executeQuery(statement);
-						if (!cUnit.getExpectedError().isEmpty()) {
-							if (!result.has("error")) {
-								throw new Exception("Test \"" + testFile
-										+ "\" FAILED!");
-							}
-						} else {
-							expectedResultFile = expectedResultFileCtxs.get(
-									queryCount).getFile();
+            for (TestFileContext ctx : testFileCtxs) {
+                testFile = ctx.getFile();
+                statement = TestsUtils.readTestFile(testFile);
+                try {
+                    switch (ctx.getType()) {
+                        case "ddl":
+                            TestsUtils.executeDDL(statement);
+                            break;
+                        case "update":
+                            TestsUtils.executeUpdate(statement);
+                            break;
+                        case "query":
+                            result = TestsUtils.executeQuery(statement);
+                            if (!cUnit.getExpectedError().isEmpty()) {
+                                if (!result.has("error")) {
+                                    throw new Exception("Test \"" + testFile + "\" FAILED!");
+                                }
+                            } else {
+                                expectedResultFile = expectedResultFileCtxs.get(queryCount).getFile();
 
-							File actualFile = new File(actualPath
-									+ File.separator
-									+ testCaseCtx.getTestCase().getFilePath()
-											.replace(File.separator, "_") + "_"
-									+ cUnit.getName() + ".adm");
+                                File actualFile = new File(actualPath + File.separator
+                                        + testCaseCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_"
+                                        + cUnit.getName() + ".adm");
 
-							File actualResultFile = testCaseCtx
-									.getActualResultFile(cUnit, new File(
-											actualPath));
-							actualResultFile.getParentFile().mkdirs();
+                                File actualResultFile = testCaseCtx.getActualResultFile(cUnit, new File(actualPath));
+                                actualResultFile.getParentFile().mkdirs();
 
-							TestsUtils.writeResultsToFile(actualFile, result);
+                                TestsUtils.writeResultsToFile(actualFile, result);
 
-							TestsUtils.runScriptAndCompareWithResult(testFile,
-									new PrintWriter(System.err),
-									expectedResultFile, actualFile);
-						}
-						queryCount++;
-						break;
-					case "mgx":
-						executeManagixCommand(statement);
-						break;
-					default:
-						throw new IllegalArgumentException(
-								"No statements of type " + ctx.getType());
-					}
-				} catch (Exception e) {
-					if (cUnit.getExpectedError().isEmpty()) {
-						throw new Exception(
-								"Test \"" + testFile + "\" FAILED!", e);
-					}
-				}
-			}
-		}
+                                TestsUtils.runScriptAndCompareWithResult(testFile, new PrintWriter(System.err),
+                                        expectedResultFile, actualFile);
+                            }
+                            queryCount++;
+                            break;
+                        case "mgx":
+                            executeManagixCommand(statement);
+                            break;
+                        default:
+                            throw new IllegalArgumentException("No statements of type " + ctx.getType());
+                    }
+                } catch (Exception e) {
+                    if (cUnit.getExpectedError().isEmpty()) {
+                        throw new Exception("Test \"" + testFile + "\" FAILED!", e);
+                    }
+                }
+            }
+        }
 
-	}
+    }
 }
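
Taken together, these helpers drive the HTTP API on port 19101 (/ddl, /update, /query) and then diff actual against expected results. Below is a minimal usage sketch outside the generated test suite, assuming a local Asterix instance is already running; the statements and file paths are placeholders.

    import java.io.File;
    import java.io.PrintWriter;
    import org.json.JSONObject;

    // Sketch only: statements and paths are placeholders, and a local instance
    // listening on port 19101 is assumed.
    public class TestsUtilsUsageSketch {
        public static void main(String[] args) throws Exception {
            TestsUtils.executeDDL("create dataverse Demo;");            // hypothetical DDL
            TestsUtils.executeUpdate("/* insert/delete statements */"); // hypothetical update
            JSONObject result = TestsUtils.executeQuery("/* query */"); // hypothetical query

            File actualFile = new File("target/demo_actual.adm");       // placeholder path
            actualFile.getParentFile().mkdirs();
            TestsUtils.writeResultsToFile(actualFile, result);
            TestsUtils.runScriptAndCompareWithResult(new File("demo.aql"), new PrintWriter(System.err),
                    new File("demo_expected.adm"), actualFile);
        }
    }
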
diff --git a/asterix-doc/pom.xml b/asterix-doc/pom.xml
new file mode 100644
index 0000000..f44c06d
--- /dev/null
+++ b/asterix-doc/pom.xml
@@ -0,0 +1,18 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<artifactId>asterix</artifactId>
+		<groupId>edu.uci.ics.asterix</groupId>
+		<version>0.0.6-SNAPSHOT</version>
+	</parent>
+	<artifactId>asterix-doc</artifactId>
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-site-plugin</artifactId>
+				<version>3.3</version>
+			</plugin>
+		</plugins>
+	</build>
+</project>
diff --git a/asterix-doc/src/site/markdown/aql.md b/asterix-doc/src/site/markdown/aql.md
new file mode 100644
index 0000000..c03d7e5
--- /dev/null
+++ b/asterix-doc/src/site/markdown/aql.md
@@ -0,0 +1,191 @@
+# The Asterix Query Language, Version 1.0
+## 1. Introduction
+
+This document provides an overview of the Asterix Query Language (AQL).
+
+## 2. Expressions
+
+    Expression ::= ( OperatorExpr | IfThenElse | FLWOGR | QuantifiedExpression )
+
+### Primary Expressions
+
+    PrimaryExpr ::= Literal | VariableRef | ParenthesizedExpression | FunctionCallExpr
+                  | DatasetAccessExpression | ListConstructor | RecordConstructor
+
+#### Literals
+
+    Literal ::= StringLiteral | <INTEGER_LITERAL> | <FLOAT_LITERAL> | <DOUBLE_LITERAL> | <NULL> | <TRUE> | <FALSE>
+    StringLiteral ::= <STRING_LITERAL>
+
+#### Variable References
+
+    VariableRef ::= <VARIABLE>
+    
+#### Parenthesized Expressions
+    
+    ParenthesizedExpression ::= <LEFTPAREN> Expression <RIGHTPAREN>
+
+#### Function Calls
+
+    FunctionCallExpr ::= FunctionOrTypeName <LEFTPAREN> ( Expression ( "," Expression )* )? <RIGHTPAREN>
+    
+#### Dataset Access
+
+    DatasetAccessExpression ::= <DATASET> ( ( Identifier ( "." Identifier )? )
+                              | ( <LEFTPAREN> Expression ( "," Expression )* <RIGHTPAREN> ) )
+    Identifier              ::= <IDENTIFIER> | StringLiteral
+
+#### Constructors
+
+    ListConstructor          ::= ( OrderedListConstructor | UnorderedListConstructor )
+    OrderedListConstructor   ::= "[" ( Expression ( "," Expression )* )? "]"
+    UnorderedListConstructor ::= "{{" ( Expression ( "," Expression )* )? "}}"
+    RecordConstructor        ::= "{" ( FieldBinding ( "," FieldBinding )* )? "}"
+    FieldBinding             ::= Expression ":" Expression
+
+### Path Expressions
+
+    ValueExpr ::= PrimaryExpr ( Field | Index )*
+    Field     ::= "." Identifier
+    Index     ::= "[" ( Expression | "?" ) "]"
+
+### Logical Expressions
+
+    OperatorExpr ::= AndExpr ( "or" AndExpr )*
+    AndExpr      ::= RelExpr ( "and" RelExpr )*
+    
+### Comparison Expressions
+
+    RelExpr ::= AddExpr ( ( "<" | ">" | "<=" | ">=" | "=" | "!=" | "~=" ) AddExpr )?
+
+### Arithmetic Expressions
+
+    AddExpr  ::= MultExpr ( ( "+" | "-" ) MultExpr )*
+    MultExpr ::= UnaryExpr ( ( "*" | "/" | "%" | <CARET> | "idiv" ) UnaryExpr )*
+    UnaryExpr ::= ( ( "+" | "-" ) )? ValueExpr
+
+### FLWOGR Expression
+    
+    FLWOGR         ::= ( ForClause | LetClause ) ( Clause )* "return" Expression
+    Clause         ::= ForClause | LetClause | WhereClause | OrderbyClause
+                     | GroupClause | LimitClause | DistinctClause
+    ForClause      ::= "for" Variable ( "at" Variable )? "in" ( Expression )
+    LetClause      ::= "let" Variable ":=" Expression
+    WhereClause    ::= "where" Expression
+    OrderbyClause  ::= "order" "by" Expression ( ( "asc" ) | ( "desc" ) )? 
+                       ( "," Expression ( ( "asc" ) | ( "desc" ) )? )*
+    GroupClause    ::= "group" "by" ( Variable ":=" )? Expression ( "," ( Variable ":=" )? Expression )*          
+                       "with" VariableRef ( "," VariableRef )*
+    LimitClause    ::= "limit" Expression ( "offset" Expression )?
+    DistinctClause ::= "distinct" "by" Expression ( "," Expression )*
+    Variable       ::= <VARIABLE>
+
+
+### Conditional Expression
+    
+    IfThenElse ::= "if" <LEFTPAREN> Expression <RIGHTPAREN> "then" Expression "else" Expression
+
+
+### Quantified Expressions
+    
+    QuantifiedExpression ::= ( ( "some" ) | ( "every" ) ) Variable "in" Expression 
+                             ( "," Variable "in" Expression )* "satisfies" Expression
+
+
+## 3. Statements
+
+    Statement ::= ( SingleStatement ( ";" )? )* <EOF>
+    SingleStatement ::= DataverseDeclaration
+                      | FunctionDeclaration
+                      | CreateStatement
+                      | DropStatement
+                      | LoadStatement
+                      | SetStatement
+                      | InsertStatement
+                      | DeleteStatement
+                      | FeedStatement
+                      | Query
+    
+### Declarations    
+    
+    DataverseDeclaration ::= "use" "dataverse" Identifier
+    SetStatement         ::= "set" Identifier StringLiteral
+    FunctionDeclaration  ::= "declare" "function" Identifier <LEFTPAREN> ( <VARIABLE> ( "," <VARIABLE> )* )? <RIGHTPAREN> "{" Expression "}"
+
+### Lifecycle Management Statements
+
+    CreateStatement ::= "create" ( TypeSpecification | DatasetSpecification | IndexSpecification | DataverseSpecification | FunctionSpecification )
+
+    DropStatement       ::= "drop" ( <DATASET> QualifiedName IfExists
+                                   | "index" DoubleQualifiedName IfExists
+                                   | "type" FunctionOrTypeName IfExists
+                                   | "dataverse" Identifier IfExists
+                                   | "function" FunctionSignature IfExists )
+    IfExists            ::= ( "if" "exists" )?
+    QualifiedName       ::= Identifier ( "." Identifier )?
+    DoubleQualifiedName ::= Identifier "." Identifier ( "." Identifier )?
+
+#### Types
+
+    TypeSpecification    ::= "type" FunctionOrTypeName IfNotExists "as" TypeExpr
+    FunctionOrTypeName   ::= QualifiedName
+    IfNotExists          ::= ( "if not exists" )?
+    TypeExpr             ::= RecordTypeDef | TypeReference | OrderedListTypeDef | UnorderedListTypeDef
+    RecordTypeDef        ::= ( "closed" | "open" )? "{" ( RecordField ( "," RecordField )* )? "}"
+    RecordField          ::= Identifier ":" ( TypeExpr ) ( "?" )?
+    TypeReference        ::= Identifier
+    OrderedListTypeDef   ::= "[" ( TypeExpr ) "]"
+    UnorderedListTypeDef ::= "{{" ( TypeExpr ) "}}"
+    
+#### Datasets
+
+    DatasetSpecification ::= "external" <DATASET> QualifiedName <LEFTPAREN> Identifier <RIGHTPAREN> IfNotExists 
+                                 "using" AdapterName Configuration ( "hints" Properties )? 
+                           | "feed" <DATASET> QualifiedName <LEFTPAREN> Identifier <RIGHTPAREN> IfNotExists
+                                 "using" AdapterName Configuration ( ApplyFunction )? PrimaryKey ( "on" Identifier )? ( "hints" Properties )? 
+                           | <DATASET> QualifiedName <LEFTPAREN> Identifier <RIGHTPAREN> IfNotExists
+                             PrimaryKey ( "on" Identifier )? ( "hints" Properties )?
+    AdapterName          ::= Identifier
+    Configuration        ::= <LEFTPAREN> ( KeyValuePair ( "," KeyValuePair )* )? <RIGHTPAREN>
+    KeyValuePair         ::= <LEFTPAREN> StringLiteral "=" StringLiteral <RIGHTPAREN>
+    Properties           ::= ( <LEFTPAREN> Property ( "," Property )* <RIGHTPAREN> )?
+    Property             ::= Identifier "=" ( StringLiteral | <INTEGER_LITERAL> )
+    ApplyFunction        ::= "apply" "function" FunctionSignature
+    FunctionSignature    ::= FunctionOrTypeName "@" <INTEGER_LITERAL>
+    PrimaryKey           ::= "primary" "key" Identifier ( "," Identifier )*
+
+#### Indices
+
+    IndexSpecification ::= "index" Identifier IfNotExists "on" QualifiedName <LEFTPAREN> ( Identifier ) ( "," Identifier )* <RIGHTPAREN> ( "type" IndexType )?
+    IndexType          ::= "btree" | "rtree" | "keyword" | "fuzzy keyword" | "ngram" <LEFTPAREN> <INTEGER_LITERAL> <RIGHTPAREN> | "fuzzy ngram" <LEFTPAREN> <INTEGER_LITERAL> <RIGHTPAREN>
+
+#### Dataverses
+
+    DataverseSpecification ::= "dataverse" Identifier IfNotExists ( "with format" StringLiteral )?
+
+#### Functions
+
+    FunctionSpecification ::= "function" FunctionOrTypeName IfNotExists <LEFTPAREN> ( <VARIABLE> ( "," <VARIABLE> )* )? <RIGHTPAREN> "{" Expression "}"
+    
+
+### Import/Export Statements
+
+    LoadStatement  ::= "load" <DATASET> QualifiedName "using" AdapterName Configuration ( "pre-sorted" )?
+
+### Modification Statements
+
+    InsertStatement ::= "insert" "into" <DATASET> QualifiedName Query
+    DeleteStatement ::= "delete" Variable "from" <DATASET> QualifiedName ( "where" Expression )?
+
+### Feed Management Statements
+
+    FeedStatement ::= "begin" "feed" QualifiedName
+                    | "suspend" "feed" QualifiedName
+                    | "resume" "feed" QualifiedName
+                    | "end" "feed" QualifiedName
+                    | "alter" "feed" QualifiedName "set" Configuration
+
+### Queries
+
+    Query ::= Expression
+    
diff --git a/asterix-events/pom.xml b/asterix-events/pom.xml
index 7b187bb..94042e1 100644
--- a/asterix-events/pom.xml
+++ b/asterix-events/pom.xml
@@ -166,5 +166,12 @@
       <artifactId>commons-io</artifactId>
       <version>1.4</version>
     </dependency>
+   <dependency>
+     <groupId>edu.uci.ics.asterix</groupId>
+     <artifactId>asterix-common</artifactId>
+     <version>0.0.6-SNAPSHOT</version>
+     <type>jar</type>
+     <scope>compile</scope>
+    </dependency>
   </dependencies>
 </project>
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java
index 9b0a41a..3972e72 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java
@@ -41,7 +41,7 @@
 public class EventDriver {
 
     public static final String CLIENT_NODE_ID = "client_node";
-    public static final Node CLIENT_NODE = new Node(CLIENT_NODE_ID, "127.0.0.1", null, null, null, null, null, null);
+    public static final Node CLIENT_NODE = new Node(CLIENT_NODE_ID, "127.0.0.1", null, null, null, null, null);
 
     private static String eventsDir;
     private static Events events;
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventExecutor.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventExecutor.java
index 6d89c88..9f75973 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventExecutor.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventExecutor.java
@@ -32,73 +32,95 @@
 
 public class EventExecutor {
 
-    public static final String EVENTS_DIR = "events";
-    private static final String EXECUTE_SCRIPT = "execute.sh";
-    private static final String IP_LOCATION = "IP_LOCATION";
-    private static final String CLUSTER_ENV = "ENV";
-    private static final String SCRIPT = "SCRIPT";
-    private static final String ARGS = "ARGS";
-    private static final String DAEMON = "DAEMON";
+	public static final String EVENTS_DIR = "events";
+	private static final String EXECUTE_SCRIPT = "execute.sh";
+	private static final String IP_LOCATION = "IP_LOCATION";
+	private static final String CLUSTER_ENV = "ENV";
+	private static final String SCRIPT = "SCRIPT";
+	private static final String ARGS = "ARGS";
+	private static final String DAEMON = "DAEMON";
 
-    public void executeEvent(Node node, String script, List<String> args, boolean isDaemon, Cluster cluster,
-            Pattern pattern, IOutputHandler outputHandler, EventrixClient client) throws IOException {
-        List<String> pargs = new ArrayList<String>();
-        pargs.add("/bin/bash");
-        pargs.add(client.getEventsDir() + File.separator + "scripts" + File.separator + EXECUTE_SCRIPT);
-        StringBuffer envBuffer = new StringBuffer(IP_LOCATION + "=" + node.getClusterIp() + " ");
-        if (!node.getId().equals(EventDriver.CLIENT_NODE_ID) && cluster.getEnv() != null) {
-            for (Property p : cluster.getEnv().getProperty()) {
-                if (p.getKey().equals("JAVA_HOME")) {
-                    String val = node.getJavaHome() == null ? p.getValue() : node.getJavaHome();
-                    envBuffer.append(p.getKey() + "=" + val + " ");
-                } else if (p.getKey().equals("JAVA_OPTS")) {
-                    StringBuilder builder = new StringBuilder();
-                    builder.append("\"");
-                    String javaOpts = (node.getJavaOpts() == null ? cluster.getJavaOpts() : node.getJavaOpts());
-                    if (javaOpts != null) {
-                        builder.append(javaOpts);
-                    }
-                    if (cluster.isDebugEnabled() != null && cluster.isDebugEnabled().booleanValue()) {
-                        BigInteger debugPort = node.getDebug() == null ? cluster.getDebug() : node.getDebug();
-                        if (debugPort != null) {
-                            builder.append("-Xdebug -Xrunjdwp:transport=dt_socket,address=" + debugPort.intValue()
-                                    + "," + "server=y,suspend=n");
-                        }
-                    }
-                    builder.append("\"");
-                    envBuffer.append(p.getKey() + "=" + builder + " ");
-                } else {
-                    envBuffer.append(p.getKey() + "=" + p.getValue() + " ");
-                }
+	public void executeEvent(Node node, String script, List<String> args,
+			boolean isDaemon, Cluster cluster, Pattern pattern,
+			IOutputHandler outputHandler, EventrixClient client)
+			throws IOException {
+		List<String> pargs = new ArrayList<String>();
+		pargs.add("/bin/bash");
+		pargs.add(client.getEventsDir() + File.separator + "scripts"
+				+ File.separator + EXECUTE_SCRIPT);
+		StringBuffer envBuffer = new StringBuffer(IP_LOCATION + "="
+				+ node.getClusterIp() + " ");
+		boolean isMasterNode = node.getId().equals(
+				cluster.getMasterNode().getId());
 
-            }
-            pargs.add(cluster.getUsername() == null ? System.getProperty("user.name") : cluster.getUsername());
-        }
+		if (!node.getId().equals(EventDriver.CLIENT_NODE_ID)
+				&& cluster.getEnv() != null) {
+			for (Property p : cluster.getEnv().getProperty()) {
+				if (p.getKey().equals("JAVA_HOME")) {
+					String val = node.getJavaHome() == null ? p.getValue()
+							: node.getJavaHome();
+					envBuffer.append(p.getKey() + "=" + val + " ");
+				} else if (p.getKey().equals(EventUtil.NC_JAVA_OPTS)) {
+					if (!isMasterNode) {
+						StringBuilder builder = new StringBuilder();
+						builder.append("\"");
+						String javaOpts = p.getValue();
+						if (javaOpts != null) {
+							builder.append(javaOpts);
+						}
+						builder.append("\"");
+						envBuffer.append("JAVA_OPTS" + "=" + builder + " ");
+					}
+				} else if (p.getKey().equals(EventUtil.CC_JAVA_OPTS)) {
+					if (isMasterNode) {
+						StringBuilder builder = new StringBuilder();
+						builder.append("\"");
+						String javaOpts = p.getValue();
+						if (javaOpts != null) {
+							builder.append(javaOpts);
+						}
+						builder.append("\"");
+						envBuffer.append("JAVA_OPTS" + "=" + builder + " ");
+					}
+				} else if (p.getKey().equals("LOG_DIR")) {
+					String val = node.getLogDir() == null ? p.getValue() : node
+							.getLogDir();
+					envBuffer.append(p.getKey() + "=" + val + " ");
+				} else {
+					envBuffer.append(p.getKey() + "=" + p.getValue() + " ");
+				}
 
-        StringBuffer argBuffer = new StringBuffer();
-        if (args != null && args.size() > 0) {
-            for (String arg : args) {
-                argBuffer.append(arg + " ");
-            }
-        }
+			}
+			pargs.add(cluster.getUsername() == null ? System
+					.getProperty("user.name") : cluster.getUsername());
+		}
 
-        ProcessBuilder pb = new ProcessBuilder(pargs);
-        pb.environment().put(IP_LOCATION, node.getClusterIp());
-        pb.environment().put(CLUSTER_ENV, envBuffer.toString());
-        pb.environment().put(SCRIPT, script);
-        pb.environment().put(ARGS, argBuffer.toString());
-        pb.environment().put(DAEMON, isDaemon ? "true" : "false");
+		StringBuffer argBuffer = new StringBuffer();
+		if (args != null && args.size() > 0) {
+			for (String arg : args) {
+				argBuffer.append(arg + " ");
+			}
+		}
 
-        Process p = pb.start();
-        if (!isDaemon) {
-            BufferedInputStream bis = new BufferedInputStream(p.getInputStream());
-            StringWriter writer = new StringWriter();
-            IOUtils.copy(bis, writer, "UTF-8");
-            String result = writer.getBuffer().toString();
-            OutputAnalysis analysis = outputHandler.reportEventOutput(pattern.getEvent(), result);
-            if (!analysis.isExpected()) {
-                throw new IOException(analysis.getErrorMessage() + result);
-            }
-        }
-    }
+		ProcessBuilder pb = new ProcessBuilder(pargs);
+		pb.environment().put(IP_LOCATION, node.getClusterIp());
+		pb.environment().put(CLUSTER_ENV, envBuffer.toString());
+		pb.environment().put(SCRIPT, script);
+		pb.environment().put(ARGS, argBuffer.toString());
+		pb.environment().put(DAEMON, isDaemon ? "true" : "false");
+
+		Process p = pb.start();
+		if (!isDaemon) {
+			BufferedInputStream bis = new BufferedInputStream(p
+					.getInputStream());
+			StringWriter writer = new StringWriter();
+			IOUtils.copy(bis, writer, "UTF-8");
+			String result = writer.getBuffer().toString();
+			OutputAnalysis analysis = outputHandler.reportEventOutput(pattern
+					.getEvent(), result);
+			if (!analysis.isExpected()) {
+				throw new IOException(analysis.getErrorMessage() + result);
+			}
+		}
+	}
 }
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java
index 1c51022..d047698 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java
@@ -36,10 +36,13 @@
 
 public class EventUtil {
 
-    public static final String EVENTS_DIR = "events";
-    public static final String CLUSTER_CONF = "config/cluster.xml";
-    public static final String PATTERN_CONF = "config/pattern.xml";
-    public static final DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
+	public static final String EVENTS_DIR = "events";
+	public static final String CLUSTER_CONF = "config/cluster.xml";
+	public static final String PATTERN_CONF = "config/pattern.xml";
+	public static final DateFormat dateFormat = new SimpleDateFormat(
+			"yyyy/MM/dd HH:mm:ss");
+	public static final String NC_JAVA_OPTS = "nc.java.opts";
+	public static final String CC_JAVA_OPTS = "cc.java.opts";
 
     private static final String IP_LOCATION = "IP_LOCATION";
     private static final String CLUSTER_ENV = "ENV";
@@ -49,21 +52,22 @@
     private static final String LOCALHOST = "localhost";
     private static final String LOCALHOST_IP = "127.0.0.1";
 
-    public static Cluster getCluster(String clusterConfigurationPath) throws JAXBException {
-        File file = new File(clusterConfigurationPath);
-        JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
-        Unmarshaller unmarshaller = ctx.createUnmarshaller();
-        Cluster cluster = (Cluster) unmarshaller.unmarshal(file);
-        if (cluster.getMasterNode().getClusterIp().equals(LOCALHOST)) {
-            cluster.getMasterNode().setClusterIp(LOCALHOST_IP);
-        }
-        for (Node node : cluster.getNode()) {
-            if (node.getClusterIp().equals(LOCALHOST)) {
-                node.setClusterIp(LOCALHOST_IP);
-            }
-        }
-        return cluster;
-    }
+	public static Cluster getCluster(String clusterConfigurationPath)
+			throws JAXBException {
+		File file = new File(clusterConfigurationPath);
+		JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
+		Unmarshaller unmarshaller = ctx.createUnmarshaller();
+		Cluster cluster = (Cluster) unmarshaller.unmarshal(file);
+		if (cluster.getMasterNode().getClusterIp().equals(LOCALHOST)) {
+			cluster.getMasterNode().setClusterIp(LOCALHOST_IP);
+		}
+		for (Node node : cluster.getNode()) {
+			if (node.getClusterIp().equals(LOCALHOST)) {
+				node.setClusterIp(LOCALHOST_IP);
+			}
+		}
+		return cluster;
+	}
 
     public static long parseTimeInterval(ValueType v, String unit) throws IllegalArgumentException {
         int val = 0;
@@ -190,17 +194,17 @@
             return EventDriver.CLIENT_NODE;
         }
 
-        if (nodeid.equals(cluster.getMasterNode().getId())) {
-            String javaOpts = cluster.getMasterNode().getJavaOpts() == null ? cluster.getJavaOpts() : cluster
-                    .getMasterNode().getJavaOpts();
-            String logDir = cluster.getMasterNode().getLogdir() == null ? cluster.getLogdir() : cluster.getMasterNode()
-                    .getLogdir();
-            String javaHome = cluster.getMasterNode().getJavaHome() == null ? cluster.getJavaHome() : cluster
-                    .getMasterNode().getJavaHome();
-            BigInteger debug = cluster.getMasterNode().getDebug();
-            return new Node(cluster.getMasterNode().getId(), cluster.getMasterNode().getClusterIp(), javaHome,
-                    javaOpts, logDir, null, null, debug);
-        }
+		if (nodeid.equals(cluster.getMasterNode().getId())) {
+			String logDir = cluster.getMasterNode().getLogDir() == null ? cluster
+					.getLogDir()
+					: cluster.getMasterNode().getLogDir();
+			String javaHome = cluster.getMasterNode().getJavaHome() == null ? cluster
+					.getJavaHome()
+					: cluster.getMasterNode().getJavaHome();
+			return new Node(cluster.getMasterNode().getId(), cluster
+					.getMasterNode().getClusterIp(), javaHome, logDir, null,
+					null, null);
+		}
 
         List<Node> nodeList = cluster.getNode();
         for (Node node : nodeList) {
diff --git a/asterix-events/src/main/resources/events/backup/backup.sh b/asterix-events/src/main/resources/events/backup/backup.sh
index 556ca39..fc6e3cc 100755
--- a/asterix-events/src/main/resources/events/backup/backup.sh
+++ b/asterix-events/src/main/resources/events/backup/backup.sh
@@ -3,7 +3,7 @@
 ASTERIX_IODEVICES=$3
 NODE_STORE=$4
 ASTERIX_ROOT_METADATA_DIR=$5
-TXN_LOG_DIR_NAME=$6
+TXN_LOG_DIR=$6
 BACKUP_ID=$7
 BACKUP_DIR=$8
 BACKUP_TYPE=$9
@@ -20,7 +20,6 @@
   for nodeIODevice in $nodeIODevices
   do
     STORE_DIR=$nodeIODevice/$NODE_STORE
-    TXN_LOG_DIR=$nodeIODevice/$TXN_LOG_DIR_NAME
     NODE_BACKUP_DIR=$BACKUP_DIR/$ASTERIX_INSTANCE_NAME/$BACKUP_ID/$NODE_ID/
    
     # make the destination directory 
@@ -46,7 +45,6 @@
   for nodeIODevice in $nodeIODevices
   do
     STORE_DIR=$nodeIODevice/$NODE_STORE
-    TXN_LOG_DIR=$nodeIODevice/$TXN_LOG_DIR_NAME
     NODE_BACKUP_DIR=$BACKUP_DIR/$ASTERIX_INSTANCE_NAME/$BACKUP_ID/$NODE_ID
 
     # create the backup directory, if it does not exists
diff --git a/asterix-events/src/main/resources/events/restore/restore.sh b/asterix-events/src/main/resources/events/restore/restore.sh
index 6396eec..88c5a6f 100755
--- a/asterix-events/src/main/resources/events/restore/restore.sh
+++ b/asterix-events/src/main/resources/events/restore/restore.sh
@@ -3,7 +3,7 @@
 ASTERIX_IODEVICES=$3
 NODE_STORE=$4
 ASTERIX_ROOT_METADATA_DIR=$5
-TXN_LOG_DIR_NAME=$6
+TXN_LOG_DIR=$6
 BACKUP_ID=$7
 BACKUP_DIR=$8
 BACKUP_TYPE=$9
@@ -33,8 +33,9 @@
   rm -rf $DEST_STORE_DIR/$SOURCE_STORE_DIR
 
   # remove the existing log directory
-  DEST_LOG_DIR=$iodevice/$TXN_LOG_DIR_NAME/
-  rm -rf $DEST_LOG_DIR
+  DEST_LOG_DIR=$TXN_LOG_DIR
+  rm -rf $DEST_LOG_DIR/*
+  TXN_LOG_DIR_NAME=$(basename $TXN_LOG_DIR)
 
   # remove the existing asterix metadata directory
   rm -rf $iodevice/$ASTERIX_ROOT_METADATA_DIR
@@ -52,7 +53,7 @@
         $HADOOP_HOME/bin/hadoop fs -copyToLocal $HDFS_URL/$NODE_BACKUP_DIR/$ASTERIX_ROOT_METADATA_DIR $iodevice/
 
         # copy transaction logs directory
-        $HADOOP_HOME/bin/hadoop fs -copyToLocal $HDFS_URL/$NODE_BACKUP_DIR/$TXN_LOG_DIR_NAME $iodevice/
+        $HADOOP_HOME/bin/hadoop fs -copyToLocal $HDFS_URL/$NODE_BACKUP_DIR/$TXN_LOG_DIR_NAME $TXN_LOG_DIR/
       fi
 
   else
@@ -67,7 +68,7 @@
         cp -r $NODE_BACKUP_DIR/$ASTERIX_ROOT_METADATA_DIR $iodevice/
 
         # copy transaction logs directory
-        cp -r $NODE_BACKUP_DIR/$TXN_LOG_DIR_NAME $iodevice/
+        cp -r $NODE_BACKUP_DIR/$TXN_LOG_DIR_NAME $TXN_LOG_DIR/
       fi
 
   fi
diff --git a/asterix-events/src/main/resources/schema/cluster.xsd b/asterix-events/src/main/resources/schema/cluster.xsd
index 0e1adce..718d7b0 100644
--- a/asterix-events/src/main/resources/schema/cluster.xsd
+++ b/asterix-events/src/main/resources/schema/cluster.xsd
@@ -1,98 +1,92 @@
 <?xml version="1.0" encoding="ISO-8859-1" ?>
-<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:cl="cluster" targetNamespace="cluster" elementFormDefault="qualified">
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+	xmlns:cl="cluster" targetNamespace="cluster" elementFormDefault="qualified">
 
-<!-- definition of simple types --> 
-<xs:element name="name" type="xs:string"/>
-<xs:element name="java_opts" type="xs:string"/>
-<xs:element name="logdir" type="xs:string"/>
-<xs:element name="id" type="xs:string"/>
-<xs:element name="client-ip" type="xs:string"/>
-<xs:element name="cluster-ip" type="xs:string"/>
-<xs:element name="key" type="xs:string"/>
-<xs:element name="value" type="xs:string"/>
-<xs:element name="dir" type="xs:string"/>
-<xs:element name="NFS" type="xs:boolean"/>
-<xs:element name="store" type="xs:string"/>
-<xs:element name="iodevices" type="xs:string"/>
-<xs:element name="java_home" type="xs:string"/>
-<xs:element name="username" type="xs:string"/>
-<xs:element name="debug" type="xs:integer"/>
-<xs:element name="debugEnabled" type="xs:boolean"/>
+	<!-- definition of simple types -->
+	<xs:element name="name" type="xs:string" />
+	<xs:element name="log_dir" type="xs:string" />
+	<xs:element name="txn_log_dir" type="xs:string" />
+	<xs:element name="id" type="xs:string" />
+	<xs:element name="client_ip" type="xs:string" />
+	<xs:element name="cluster_ip" type="xs:string" />
+	<xs:element name="key" type="xs:string" />
+	<xs:element name="value" type="xs:string" />
+	<xs:element name="dir" type="xs:string" />
+	<xs:element name="NFS" type="xs:boolean" />
+	<xs:element name="store" type="xs:string" />
+	<xs:element name="iodevices" type="xs:string" />
+	<xs:element name="java_home" type="xs:string" />
+	<xs:element name="username" type="xs:string" />
 
-<!-- definition of complex elements -->
-<xs:element name="workingDir">
-  <xs:complexType>
-    <xs:sequence>
-      <xs:element ref="cl:dir"/>
-      <xs:element ref="cl:NFS"/>
-    </xs:sequence>
-  </xs:complexType>
-</xs:element>
+	<!-- definition of complex elements -->
+	<xs:element name="working_dir">
+		<xs:complexType>
+			<xs:sequence>
+				<xs:element ref="cl:dir" />
+				<xs:element ref="cl:NFS" />
+			</xs:sequence>
+		</xs:complexType>
+	</xs:element>
 
-<xs:element name="master-node">
-  <xs:complexType>
-    <xs:sequence>
-      <xs:element ref="cl:id"/>
-      <xs:element ref="cl:client-ip"/>
-      <xs:element ref="cl:cluster-ip"/>
-      <xs:element ref="cl:java_home" minOccurs="0"/>
-      <xs:element ref="cl:java_opts" minOccurs="0"/>
-      <xs:element ref="cl:logdir" minOccurs="0"/>
-      <xs:element ref="cl:debug" minOccurs="0"/>
-    </xs:sequence>
-  </xs:complexType>
-</xs:element>
+	<xs:element name="master_node">
+		<xs:complexType>
+			<xs:sequence>
+				<xs:element ref="cl:id" />
+				<xs:element ref="cl:client_ip" />
+				<xs:element ref="cl:cluster_ip" />
+				<xs:element ref="cl:java_home" minOccurs="0" />
+				<xs:element ref="cl:log_dir" minOccurs="0" />
+			</xs:sequence>
+		</xs:complexType>
+	</xs:element>
 
-<xs:element name="property">
-  <xs:complexType>
-    <xs:sequence>
-      <xs:element ref="cl:key"/>
-      <xs:element ref="cl:value"/>
-    </xs:sequence>
-  </xs:complexType>
-</xs:element>
+	<xs:element name="property">
+		<xs:complexType>
+			<xs:sequence>
+				<xs:element ref="cl:key" />
+				<xs:element ref="cl:value" />
+			</xs:sequence>
+		</xs:complexType>
+	</xs:element>
 
-<xs:element name="env">
-  <xs:complexType>
-    <xs:sequence>
-      <xs:element ref="cl:property" minOccurs="0" maxOccurs="unbounded"/>
-    </xs:sequence>
-  </xs:complexType>
-</xs:element>
+	<xs:element name="env">
+		<xs:complexType>
+			<xs:sequence>
+				<xs:element ref="cl:property" minOccurs="0" maxOccurs="unbounded" />
+			</xs:sequence>
+		</xs:complexType>
+	</xs:element>
 
-<xs:element name="node">
-  <xs:complexType>
-    <xs:sequence>
-      <xs:element ref="cl:id"/>
-      <xs:element ref="cl:cluster-ip"/>
-      <xs:element ref="cl:java_home" minOccurs="0"/>
-      <xs:element ref="cl:java_opts" minOccurs="0"/>
-      <xs:element ref="cl:logdir" minOccurs="0"/>
-      <xs:element ref="cl:store" minOccurs="0"/>
-      <xs:element ref="cl:iodevices" minOccurs="0"/>
-      <xs:element ref="cl:debug" minOccurs="0"/>
-    </xs:sequence>
-  </xs:complexType>
-</xs:element>
+	<xs:element name="node">
+		<xs:complexType>
+			<xs:sequence>
+				<xs:element ref="cl:id" />
+				<xs:element ref="cl:cluster_ip" />
+				<xs:element ref="cl:java_home" minOccurs="0" />
+				<xs:element ref="cl:log_dir" minOccurs="0" />
+				<xs:element ref="cl:txn_log_dir" minOccurs="0" />
+				<xs:element ref="cl:store" minOccurs="0" />
+				<xs:element ref="cl:iodevices" minOccurs="0" />
+			</xs:sequence>
+		</xs:complexType>
+	</xs:element>
 
-<xs:element name="cluster">
-  <xs:complexType>
-    <xs:sequence>
-      <xs:element ref="cl:name"/>
-      <xs:element ref="cl:username"/>
-      <xs:element ref="cl:env" minOccurs="0"/>
-      <xs:element ref="cl:java_home" minOccurs="0"/>
-      <xs:element ref="cl:java_opts" minOccurs="0"/>
-      <xs:element ref="cl:logdir" minOccurs="0"/>
-      <xs:element ref="cl:store" minOccurs="0"/>
-      <xs:element ref="cl:iodevices" minOccurs="0"/>
-      <xs:element ref="cl:workingDir"/>
-      <xs:element ref="cl:debugEnabled" minOccurs="0"/>
-      <xs:element ref="cl:debug" minOccurs="0"/>
-      <xs:element ref="cl:master-node"/>
-      <xs:element ref="cl:node" maxOccurs="unbounded"/>
-    </xs:sequence>
-  </xs:complexType>
-</xs:element>
+	<xs:element name="cluster">
+		<xs:complexType>
+			<xs:sequence>
+				<xs:element ref="cl:name" />
+				<xs:element ref="cl:username" />
+				<xs:element ref="cl:env" minOccurs="0" />
+				<xs:element ref="cl:java_home" minOccurs="0" />
+				<xs:element ref="cl:log_dir" minOccurs="0" />
+				<xs:element ref="cl:txn_log_dir" minOccurs="0" />
+				<xs:element ref="cl:store" minOccurs="0" />
+				<xs:element ref="cl:iodevices" minOccurs="0" />
+				<xs:element ref="cl:working_dir" />
+				<xs:element ref="cl:master_node" />
+				<xs:element ref="cl:node" maxOccurs="unbounded" />
+			</xs:sequence>
+		</xs:complexType>
+	</xs:element>
 
 </xs:schema>     
diff --git a/asterix-installer/pom.xml b/asterix-installer/pom.xml
index 5b3b7ff..8596c48 100644
--- a/asterix-installer/pom.xml
+++ b/asterix-installer/pom.xml
@@ -175,4 +175,4 @@
                         <scope>test</scope>
                 </dependency>
         </dependencies>
-</project>
\ No newline at end of file
+</project>
diff --git a/asterix-installer/src/main/assembly/binary-assembly.xml b/asterix-installer/src/main/assembly/binary-assembly.xml
index 739b444..1db12d5 100644
--- a/asterix-installer/src/main/assembly/binary-assembly.xml
+++ b/asterix-installer/src/main/assembly/binary-assembly.xml
@@ -3,6 +3,7 @@
   <formats>
     <format>dir</format>
     <format>zip</format>
+    <format>dir</format>
   </formats>
   <includeBaseDirectory>false</includeBaseDirectory>
   <fileSets>
@@ -62,6 +63,7 @@
       <includes>
          <include>log4j:log4j</include>
          <include>edu.uci.ics.asterix:asterix-events</include>
+         <include>edu.uci.ics.asterix:asterix-common</include>
          <include>org.apache.zookeeper:zookeeper</include>
          <include>args4j:args4j</include>
          <include>log4j:log4j</include>
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AlterCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AlterCommand.java
index 5ef7449..814f3c9 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AlterCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AlterCommand.java
@@ -15,10 +15,10 @@
 package edu.uci.ics.asterix.installer.command;
 
 import java.util.Date;
-import java.util.Properties;
 
 import org.kohsuke.args4j.Option;
 
+import edu.uci.ics.asterix.common.configuration.AsterixConfiguration;
 import edu.uci.ics.asterix.installer.driver.InstallerDriver;
 import edu.uci.ics.asterix.installer.driver.InstallerUtil;
 import edu.uci.ics.asterix.installer.model.AsterixInstance;
@@ -35,12 +35,14 @@
         InstallerUtil.validateAsterixInstanceExists(instanceName, State.INACTIVE);
         ILookupService lookupService = ServiceProvider.INSTANCE.getLookupService();
         AsterixInstance instance = lookupService.getAsterixInstance(instanceName);
-
-        Properties asterixConfProp = InstallerUtil.getAsterixConfiguration(((AlterConfig) config).confPath);
-        instance.setConfiguration(asterixConfProp);
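+        // refresh the cluster env from the currently stored configuration, then record the newly supplied one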
+        InstallerUtil.createClusterProperties(instance.getCluster(), instance.getAsterixConfiguration());
+        AsterixConfiguration asterixConfiguration = InstallerUtil
+                .getAsterixConfiguration(((AlterConfig) config).confPath);
+        instance.setAsterixConfiguration(asterixConfiguration);
         instance.setModifiedTimestamp(new Date());
         lookupService.updateAsterixInstance(instance);
-        LOGGER.info("Configuration for Asterix instance: " + instanceName + " has been altered");
+        LOGGER.info("Altered configuration settings for Asterix instance: " + instanceName);
+
     }
 
     @Override
@@ -52,8 +54,8 @@
     protected String getUsageDescription() {
         return "\nAlter the instance's configuration settings."
                 + "\nPrior to running this command, the instance is required to be in the INACTIVE state."
-                + "\n\nAvailable arguments/options" + "\n-n name of the ASTERIX instance"
-                + "\n-conf path to the ASTERIX configuration file.";
+                + "\nChanged configuration settings will be reflected when the instance is started."
+                + "\n\nAvailable arguments/options" + "\n-n name of the ASTERIX instance.";
     }
 
 }
@@ -63,7 +65,7 @@
     @Option(name = "-n", required = true, usage = "Name of Asterix Instance")
     public String name;
 
-    @Option(name = "-conf", required = true, usage = "Path to instance configuration")
+    @Option(name = "-a", required = true, usage = "Path to asterix instance configuration")
     public String confPath;
 
 }
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ConfigureCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ConfigureCommand.java
index 7f12f01..43400ea 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ConfigureCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ConfigureCommand.java
@@ -26,9 +26,9 @@
         cluster.setWorkingDir(new WorkingDir(workingDir, true));
         cluster.setIodevices(workingDir);
         cluster.setStore("storage");
-        cluster.setLogdir(workingDir + File.separator + "logs");
+        cluster.setLogDir(workingDir + File.separator + "logs");
+        cluster.setTxnLogDir(workingDir + File.separator + "txnLogs");
         cluster.setJavaHome(System.getProperty("java.home"));
-        cluster.setJavaOpts("-Xmx1024m");
 
         JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
         Marshaller marshaller = ctx.createMarshaller();
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java
index 81d6481..028d47b 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java
@@ -15,19 +15,16 @@
 package edu.uci.ics.asterix.installer.command;
 
 import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
 
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.Unmarshaller;
 
 import org.kohsuke.args4j.Option;
 
+import edu.uci.ics.asterix.common.configuration.AsterixConfiguration;
 import edu.uci.ics.asterix.event.management.EventUtil;
 import edu.uci.ics.asterix.event.management.EventrixClient;
 import edu.uci.ics.asterix.event.schema.cluster.Cluster;
-import edu.uci.ics.asterix.event.schema.cluster.Env;
-import edu.uci.ics.asterix.event.schema.cluster.Property;
 import edu.uci.ics.asterix.event.schema.pattern.Patterns;
 import edu.uci.ics.asterix.installer.driver.InstallerDriver;
 import edu.uci.ics.asterix.installer.driver.InstallerUtil;
@@ -41,6 +38,7 @@
 
     private String asterixInstanceName;
     private Cluster cluster;
+    private AsterixConfiguration asterixConfiguration;
 
     @Override
     protected void execCommand() throws Exception {
@@ -54,24 +52,12 @@
         InstallerUtil.validateAsterixInstanceNotExists(asterixInstanceName);
         CreateConfig createConfig = (CreateConfig) config;
         cluster = EventUtil.getCluster(createConfig.clusterPath);
-        AsterixInstance asterixInstance = InstallerUtil.createAsterixInstance(asterixInstanceName, cluster);
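+        // the -a option is optional; a null path makes getAsterixConfiguration fall back to the bundled default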
+        asterixConfiguration = InstallerUtil.getAsterixConfiguration(createConfig.asterixConfPath);
+        AsterixInstance asterixInstance = InstallerUtil.createAsterixInstance(asterixInstanceName, cluster,
+                asterixConfiguration);
         InstallerUtil.evaluateConflictWithOtherInstances(asterixInstance);
-        InstallerUtil.createAsterixZip(asterixInstance, true);
-        List<Property> clusterProperties = new ArrayList<Property>();
-        clusterProperties.add(new Property("ASTERIX_HOME", cluster.getWorkingDir().getDir() + File.separator
-                + "asterix"));
-        StringBuilder javaOpts = new StringBuilder();
-        if (cluster.getJavaOpts() != null) {
-            javaOpts.append(cluster.getJavaOpts());
-        }
-        clusterProperties.add(new Property("JAVA_OPTS", javaOpts.toString()));
-        clusterProperties.add(new Property("CLUSTER_NET_IP", cluster.getMasterNode().getClusterIp()));
-        clusterProperties.add(new Property("CLIENT_NET_IP", cluster.getMasterNode().getClientIp()));
-        clusterProperties.add(new Property("LOG_DIR", cluster.getLogdir()));
-        clusterProperties.add(new Property("JAVA_HOME", cluster.getJavaHome()));
-        clusterProperties.add(new Property("WORKING_DIR", cluster.getWorkingDir().getDir()));
-        cluster.setEnv(new Env(clusterProperties));
-
+        InstallerUtil.createAsterixZip(asterixInstance);
+        InstallerUtil.createClusterProperties(cluster, asterixConfiguration);
         EventrixClient eventrixClient = InstallerUtil.getEventrixClient(cluster);
         PatternCreator pc = new PatternCreator();
 
@@ -107,7 +93,8 @@
         return "\nCreates an ASTERIX instance with a specified name."
                 + "\n\nPost creation, the instance is in ACTIVE state, indicating its "
                 + "\navailability for executing statements/queries." + "\n\nUsage arguments/options:"
-                + "\n-n Name of the ASTERIX instance." + "\n-c Path to the cluster configuration file";
+                + "\n-n Name of the ASTERIX instance." + "\n-c Path to the cluster configuration file"
+                + "\n[-a] Path to asterix configuration file" + "\n [..] indicates optional flag";
     }
 
 }
@@ -120,4 +107,7 @@
     @Option(name = "-c", required = true, usage = "Path to cluster configuration")
     public String clusterPath;
 
+    @Option(name = "-a", required = false, usage = "Path to asterix configuration")
+    public String asterixConfPath;
+
 }
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/HelpCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/HelpCommand.java
index f0e2fcc..b83c416 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/HelpCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/HelpCommand.java
@@ -60,6 +60,8 @@
                 break;
             case UNINSTALL:
                 helpMessage = new UninstallCommand().getUsageDescription();
+                break;
+            case ALTER:
+                helpMessage = new AlterCommand().getUsageDescription();
                 break;
             default:
                 helpMessage = "Unknown command " + command;
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java
index 01fdda4..1180a4e 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java
@@ -25,8 +25,8 @@
 import edu.uci.ics.asterix.installer.error.VerificationUtil;
 import edu.uci.ics.asterix.installer.events.PatternCreator;
 import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
 import edu.uci.ics.asterix.installer.model.AsterixRuntimeState;
+import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
 import edu.uci.ics.asterix.installer.service.ServiceProvider;
 
 public class StartCommand extends AbstractCommand {
@@ -36,13 +36,13 @@
         InstallerDriver.initConfig();
         String asterixInstanceName = ((StartConfig) config).name;
         AsterixInstance instance = InstallerUtil.validateAsterixInstanceExists(asterixInstanceName, State.INACTIVE);
-        InstallerUtil.createAsterixZip(instance, false);
+        InstallerUtil.createAsterixZip(instance);
         PatternCreator pc = new PatternCreator();
         EventrixClient client = InstallerUtil.getEventrixClient(instance.getCluster());
         Patterns asterixBinaryTransferPattern = pc.getAsterixBinaryTransferPattern(asterixInstanceName,
                 instance.getCluster());
         client.submit(asterixBinaryTransferPattern);
-
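+        // rebuild the cluster env properties from the instance's stored configuration before generating the start patterns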
+        InstallerUtil.createClusterProperties(instance.getCluster(), instance.getAsterixConfiguration());
         Patterns patterns = pc.getStartAsterixPattern(asterixInstanceName, instance.getCluster());
         client.submit(patterns);
         InstallerUtil.deleteDirectory(InstallerDriver.getManagixHome() + File.separator + InstallerDriver.ASTERIX_DIR
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ValidateCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ValidateCommand.java
index dcc63df..6bde70b 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ValidateCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ValidateCommand.java
@@ -15,7 +15,6 @@
 package edu.uci.ics.asterix.installer.command;
 
 import java.io.File;
-import java.net.InetAddress;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
@@ -102,8 +101,8 @@
             serverIds.add(cluster.getMasterNode().getId());
 
             MasterNode masterNode = cluster.getMasterNode();
-            Node master = new Node(masterNode.getId(), masterNode.getClusterIp(), masterNode.getJavaOpts(),
-                    masterNode.getJavaHome(), masterNode.getLogdir(), null, null, null);
+            Node master = new Node(masterNode.getId(), masterNode.getClusterIp(), masterNode.getJavaHome(),
+                    masterNode.getLogDir(), null, null, null);
             ipAddresses.add(masterNode.getClusterIp());
 
             valid = valid & validateNodeConfiguration(master, cluster);
@@ -155,8 +154,8 @@
 
     private void validateClusterProperties(Cluster cluster) {
         List<String> tempDirs = new ArrayList<String>();
-        if (cluster.getLogdir() != null && checkTemporaryPath(cluster.getLogdir())) {
-            tempDirs.add("Log directory: " + cluster.getLogdir());
+        if (cluster.getLogDir() != null && checkTemporaryPath(cluster.getLogDir())) {
+            tempDirs.add("Log directory: " + cluster.getLogDir());
         }
         if (cluster.getIodevices() != null && checkTemporaryPath(cluster.getIodevices())) {
             tempDirs.add("IO Device: " + cluster.getIodevices());
@@ -175,7 +174,6 @@
 
     private boolean validateNodeConfiguration(Node node, Cluster cluster) {
         boolean valid = true;
-        valid = checkNodeReachability(node.getClusterIp());
         if (node.getJavaHome() == null || node.getJavaHome().length() == 0) {
             if (cluster.getJavaHome() == null || cluster.getJavaHome().length() == 0) {
                 valid = false;
@@ -183,8 +181,8 @@
             }
         }
 
-        if (node.getLogdir() == null || node.getLogdir().length() == 0) {
-            if (cluster.getLogdir() == null || cluster.getLogdir().length() == 0) {
+        if (node.getLogDir() == null || node.getLogDir().length() == 0) {
+            if (cluster.getLogDir() == null || cluster.getLogDir().length() == 0) {
                 valid = false;
                 LOGGER.fatal("log_dir not defined at cluster/node level for node: " + node.getId() + ERROR);
             }
@@ -244,10 +242,6 @@
                     + File.separator + InstallerDriver.MANAGIX_CONF_XML);
         }
 
-        for (String server : zk.getServers().getServer()) {
-            valid = valid && checkNodeReachability(server);
-        }
-
         if (valid) {
             valid = valid & checkPasswordLessSSHLogin(System.getProperty("user.name"), zk.getServers().getServer());
         }
@@ -255,21 +249,6 @@
         return valid;
     }
 
-    private boolean checkNodeReachability(String server) {
-        boolean reachable = true;
-        try {
-            InetAddress address = InetAddress.getByName(server);
-            if (!address.isReachable(1000)) {
-                LOGGER.fatal("\n" + "Server: " + server + " unreachable" + ERROR);
-                reachable = false;
-            }
-        } catch (Exception e) {
-            reachable = false;
-            LOGGER.fatal("\n" + "Server: " + server + " Invalid address" + ERROR);
-        }
-        return reachable;
-    }
-
 }
 
 class ValidateConfig extends CommandConfig {
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java
index 9d95b4a..659cd77 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java
@@ -34,6 +34,8 @@
     public static final String MANAGIX_EVENT_DIR = MANAGIX_INTERNAL_DIR + File.separator + "eventrix";
     public static final String MANAGIX_EVENT_SCRIPTS_DIR = MANAGIX_INTERNAL_DIR + File.separator + "eventrix"
             + File.separator + "scripts";
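+    // default asterix configuration bundled with managix, resolved relative to MANAGIX_HOME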
+    public static final String DEFAULT_ASTERIX_CONFIGURATION_PATH = "conf" + File.separator
+            + "asterix-configuration.xml";
     public static final String ASTERIX_DIR = "asterix";
     public static final String EVENTS_DIR = "events";
 
@@ -92,6 +94,10 @@
         return managixHome;
     }
 
+    public static void setManagixHome(String managixHome) {
+        InstallerDriver.managixHome = managixHome;
+    }
+
     public static String getAsterixDir() {
         return managixHome + File.separator + ASTERIX_DIR;
     }
@@ -125,6 +131,7 @@
         buffer.append("stop     " + ":" + " Stops an asterix instance that is in ACTIVE state" + "\n");
         buffer.append("backup   " + ":" + " Creates a back up for an existing asterix instance" + "\n");
         buffer.append("restore  " + ":" + " Restores an asterix instance" + "\n");
+        buffer.append("alter    " + ":" + " Alter the instance's configuration settings" + "\n");
         buffer.append("describe " + ":" + " Describes an existing asterix instance" + "\n");
         buffer.append("validate " + ":" + " Validates the installer/cluster configuration" + "\n");
         buffer.append("configure" + ":" + " Auto-generate configuration for local pseudo-distributed Asterix instance"
@@ -133,11 +140,8 @@
         buffer.append("uninstall" + ":" + " Uninstalls a library from an asterix instance" + "\n");
         buffer.append("shutdown " + ":" + " Shutdown the installer service" + "\n");
         buffer.append("help     " + ":" + " Provides usage description of a command" + "\n");
-
+        buffer.append("\nTo get more information about a command, use managix help -cmd <command>");
         LOGGER.info(buffer.toString());
     }
 
-    public static void setManagixHome(String managixHome) {
-        InstallerDriver.managixHome = managixHome;
-    }
 }
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerUtil.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerUtil.java
index e2be142..2b884ef 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerUtil.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerUtil.java
@@ -40,12 +40,22 @@
 import java.util.zip.ZipInputStream;
 import java.util.zip.ZipOutputStream;
 
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Marshaller;
+import javax.xml.bind.Unmarshaller;
+
 import org.apache.commons.io.IOUtils;
 
+import edu.uci.ics.asterix.common.configuration.AsterixConfiguration;
+import edu.uci.ics.asterix.common.configuration.Store;
 import edu.uci.ics.asterix.event.driver.EventDriver;
 import edu.uci.ics.asterix.event.management.EventrixClient;
+import edu.uci.ics.asterix.event.management.EventUtil;
 import edu.uci.ics.asterix.event.schema.cluster.Cluster;
+import edu.uci.ics.asterix.event.schema.cluster.Env;
 import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.event.schema.cluster.Property;
 import edu.uci.ics.asterix.installer.error.InstallerException;
 import edu.uci.ics.asterix.installer.error.OutputHandler;
 import edu.uci.ics.asterix.installer.model.AsterixInstance;
@@ -56,40 +66,63 @@
 
     public static final String TXN_LOG_DIR = "txnLogs";
     public static final String TXN_LOG_DIR_KEY_SUFFIX = "txnLogDir";
+    public static final String ASTERIX_CONFIGURATION_FILE = "asterix-configuration.xml";
+    public static final String TXN_LOG_CONFIGURATION_FILE = "log.properties";
 
-    public static AsterixInstance createAsterixInstance(String asterixInstanceName, Cluster cluster)
-            throws FileNotFoundException, IOException {
-        Properties asterixConfProp = new Properties();
-        asterixConfProp.put("output_dir", cluster.getWorkingDir().getDir() + File.separator + "asterix_output");
+    public static AsterixInstance createAsterixInstance(String asterixInstanceName, Cluster cluster,
+            AsterixConfiguration asterixConfiguration) throws FileNotFoundException, IOException {
         Node metadataNode = getMetadataNode(cluster);
         String asterixZipName = InstallerDriver.getAsterixZip().substring(
                 InstallerDriver.getAsterixZip().lastIndexOf(File.separator) + 1);
         String asterixVersion = asterixZipName.substring("asterix-server-".length(),
                 asterixZipName.indexOf("-binary-assembly"));
-        AsterixInstance instance = new AsterixInstance(asterixInstanceName, cluster, asterixConfProp,
+        AsterixInstance instance = new AsterixInstance(asterixInstanceName, cluster, asterixConfiguration,
                 metadataNode.getId(), asterixVersion);
         return instance;
     }
 
-    public static void createAsterixZip(AsterixInstance asterixInstance, boolean newDeployment) throws IOException,
-            InterruptedException {
+    public static void createAsterixZip(AsterixInstance asterixInstance) throws IOException, InterruptedException,
+            JAXBException, InstallerException {
 
-        String modifiedZipPath = injectAsterixPropertyFile(InstallerDriver.getAsterixZip(), asterixInstance,
-                newDeployment);
+        String modifiedZipPath = injectAsterixPropertyFile(InstallerDriver.getAsterixZip(), asterixInstance);
         injectAsterixLogPropertyFile(modifiedZipPath, asterixInstance);
     }
 
-    private static String injectAsterixPropertyFile(String origZipFile, AsterixInstance asterixInstance,
-            boolean newDeployment) throws IOException {
-        writeAsterixConfigurationFile(asterixInstance, newDeployment);
+    public static void createClusterProperties(Cluster cluster, AsterixConfiguration asterixConfiguration) {
+        List<Property> clusterProperties = null;
+        if (cluster.getEnv() != null && cluster.getEnv().getProperty() != null) {
+            clusterProperties = cluster.getEnv().getProperty();
+            clusterProperties.clear();
+        } else {
+            clusterProperties = new ArrayList<Property>();
+        }
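+        // carry the CC/NC JVM options from the asterix configuration over into the cluster environment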
+        for (edu.uci.ics.asterix.common.configuration.Property property : asterixConfiguration.getProperty()) {
+            if (property.getName().equalsIgnoreCase(EventUtil.CC_JAVA_OPTS)) {
+                clusterProperties.add(new Property(EventUtil.CC_JAVA_OPTS, property.getValue()));
+            } else if (property.getName().equalsIgnoreCase(EventUtil.NC_JAVA_OPTS)) {
+                clusterProperties.add(new Property(EventUtil.NC_JAVA_OPTS, property.getValue()));
+            }
+        }
+        clusterProperties.add(new Property("ASTERIX_HOME", cluster.getWorkingDir().getDir() + File.separator
+                + "asterix"));
+        clusterProperties.add(new Property("CLUSTER_NET_IP", cluster.getMasterNode().getClusterIp()));
+        clusterProperties.add(new Property("CLIENT_NET_IP", cluster.getMasterNode().getClientIp()));
+        clusterProperties.add(new Property("LOG_DIR", cluster.getLogDir()));
+        clusterProperties.add(new Property("JAVA_HOME", cluster.getJavaHome()));
+        clusterProperties.add(new Property("WORKING_DIR", cluster.getWorkingDir().getDir()));
+        cluster.setEnv(new Env(clusterProperties));
+    }
+
+    private static String injectAsterixPropertyFile(String origZipFile, AsterixInstance asterixInstance)
+            throws IOException, JAXBException {
+        writeAsterixConfigurationFile(asterixInstance);
         String asterixInstanceDir = InstallerDriver.getAsterixDir() + File.separator + asterixInstance.getName();
         unzip(origZipFile, asterixInstanceDir);
         File sourceJar = new File(asterixInstanceDir + File.separator + "lib" + File.separator + "asterix-app-"
                 + asterixInstance.getAsterixVersion() + ".jar");
-        String asterixPropertyFile = "test.properties";
-        File replacementFile = new File(asterixInstanceDir + File.separator + "test.properties");
-        replaceInJar(sourceJar, asterixPropertyFile, replacementFile);
-        new File(asterixInstanceDir + File.separator + "test.properties").delete();
+        File replacementFile = new File(asterixInstanceDir + File.separator + ASTERIX_CONFIGURATION_FILE);
+        replaceInJar(sourceJar, ASTERIX_CONFIGURATION_FILE, replacementFile);
+        new File(asterixInstanceDir + File.separator + ASTERIX_CONFIGURATION_FILE).delete();
         String asterixZipName = InstallerDriver.getAsterixZip().substring(
                 InstallerDriver.getAsterixZip().lastIndexOf(File.separator) + 1);
         zipDir(new File(asterixInstanceDir), new File(asterixInstanceDir + File.separator + asterixZipName));
@@ -97,25 +130,24 @@
     }
 
     private static String injectAsterixLogPropertyFile(String origZipFile, AsterixInstance asterixInstance)
-            throws IOException {
+            throws IOException, InstallerException {
         String asterixInstanceDir = InstallerDriver.getAsterixDir() + File.separator + asterixInstance.getName();
         unzip(origZipFile, asterixInstanceDir);
         File sourceJar1 = new File(asterixInstanceDir + File.separator + "lib" + File.separator + "asterix-app-"
                 + asterixInstance.getAsterixVersion() + ".jar");
-        String txnLogPropertyFile = "log.properties";
         Properties txnLogProperties = new Properties();
         URLClassLoader urlClassLoader = new URLClassLoader(new URL[] { sourceJar1.toURI().toURL() });
-        InputStream in = urlClassLoader.getResourceAsStream(txnLogPropertyFile);
+        InputStream in = urlClassLoader.getResourceAsStream(TXN_LOG_CONFIGURATION_FILE);
         if (in != null) {
             txnLogProperties.load(in);
         }
 
-        writeAsterixLogConfigurationFile(asterixInstance.getName(), asterixInstance.getCluster(), txnLogProperties);
+        writeAsterixLogConfigurationFile(asterixInstance, txnLogProperties);
 
         File sourceJar2 = new File(asterixInstanceDir + File.separator + "lib" + File.separator + "asterix-app-"
                 + asterixInstance.getAsterixVersion() + ".jar");
         File replacementFile = new File(asterixInstanceDir + File.separator + "log.properties");
-        replaceInJar(sourceJar2, txnLogPropertyFile, replacementFile);
+        replaceInJar(sourceJar2, TXN_LOG_CONFIGURATION_FILE, replacementFile);
 
         new File(asterixInstanceDir + File.separator + "log.properties").delete();
         String asterixZipName = InstallerDriver.getAsterixZip().substring(
@@ -174,54 +206,74 @@
         return nodeDataStore.toString();
     }
 
-    private static void writeAsterixConfigurationFile(AsterixInstance asterixInstance, boolean newData)
-            throws IOException {
+    private static void writeAsterixConfigurationFile(AsterixInstance asterixInstance) throws IOException,
+            JAXBException {
         String asterixInstanceName = asterixInstance.getName();
         Cluster cluster = asterixInstance.getCluster();
         String metadataNodeId = asterixInstance.getMetadataNodeId();
 
-        StringBuffer conf = new StringBuffer();
-        conf.append("MetadataNode=" + asterixInstanceName + "_" + metadataNodeId + "\n");
-        conf.append("NewUniverse=" + newData + "\n");
+        AsterixConfiguration configuration = asterixInstance.getAsterixConfiguration();
+        configuration.setMetadataNode(asterixInstanceName + "_" + metadataNodeId);
 
         String storeDir = null;
+        List<Store> stores = new ArrayList<Store>();
         for (Node node : cluster.getNode()) {
             storeDir = node.getStore() == null ? cluster.getStore() : node.getStore();
-            conf.append(asterixInstanceName + "_" + node.getId() + ".stores" + "=" + storeDir + "\n");
+            stores.add(new Store(asterixInstanceName + "_" + node.getId(), storeDir));
         }
-
-        Properties asterixConfProp = asterixInstance.getConfiguration();
-        String outputDir = asterixConfProp.getProperty("output_dir");
-        conf.append("OutputDir=" + outputDir);
+        configuration.setStore(stores);
 
         File asterixConfDir = new File(InstallerDriver.getAsterixDir() + File.separator + asterixInstanceName);
         asterixConfDir.mkdirs();
-        dumpToFile(InstallerDriver.getAsterixDir() + File.separator + asterixInstanceName + File.separator
-                + "test.properties", conf.toString());
+
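+        // marshal the per-instance configuration to asterix-configuration.xml; injectAsterixPropertyFile swaps it into the asterix-app jar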
+        JAXBContext ctx = JAXBContext.newInstance(AsterixConfiguration.class);
+        Marshaller marshaller = ctx.createMarshaller();
+        marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
+        marshaller.marshal(configuration, new FileOutputStream(asterixConfDir + File.separator
+                + ASTERIX_CONFIGURATION_FILE));
     }
 
-    private static void writeAsterixLogConfigurationFile(String asterixInstanceName, Cluster cluster,
-            Properties logProperties) throws IOException {
+    private static void writeAsterixLogConfigurationFile(AsterixInstance asterixInstance, Properties logProperties)
+            throws IOException, InstallerException {
+        String asterixInstanceName = asterixInstance.getName();
+        Cluster cluster = asterixInstance.getCluster();
         StringBuffer conf = new StringBuffer();
         for (Map.Entry<Object, Object> p : logProperties.entrySet()) {
             conf.append(p.getKey() + "=" + p.getValue() + "\n");
         }
 
         for (Node node : cluster.getNode()) {
-            String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
-            String txnLogDir = iodevices.split(",")[0].trim() + File.separator + InstallerUtil.TXN_LOG_DIR;
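+            // node-level txn_log_dir overrides the cluster-level setting; fail fast if neither is configured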
+            String txnLogDir = node.getTxnLogDir() == null ? cluster.getTxnLogDir() : node.getTxnLogDir();
+            if (txnLogDir == null) {
+                throw new InstallerException("Transaction log directory (txn_log_dir) not configured for node: "
+                        + node.getId());
+            }
             conf.append(asterixInstanceName + "_" + node.getId() + "." + TXN_LOG_DIR_KEY_SUFFIX + "=" + txnLogDir
                     + "\n");
         }
+        List<edu.uci.ics.asterix.common.configuration.Property> properties = asterixInstance.getAsterixConfiguration()
+                .getProperty();
+        for (edu.uci.ics.asterix.common.configuration.Property p : properties) {
+            if (p.getName().trim().toLowerCase().contains("log")) {
+                conf.append(p.getName() + "=" + p.getValue() + "\n");
+            }
+        }
         dumpToFile(InstallerDriver.getAsterixDir() + File.separator + asterixInstanceName + File.separator
                 + "log.properties", conf.toString());
 
     }
 
-    public static Properties getAsterixConfiguration(String asterixConf) throws FileNotFoundException, IOException {
-        Properties prop = new Properties();
-        prop.load(new FileInputStream(asterixConf));
-        return prop;
+    public static AsterixConfiguration getAsterixConfiguration(String asterixConf) throws FileNotFoundException,
+            IOException, JAXBException {
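+        // when no path is given, fall back to the default configuration under MANAGIX_HOME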
+        if (asterixConf == null) {
+            asterixConf = InstallerDriver.getManagixHome() + File.separator
+                    + InstallerDriver.DEFAULT_ASTERIX_CONFIGURATION_PATH;
+        }
+        File file = new File(asterixConf);
+        JAXBContext ctx = JAXBContext.newInstance(AsterixConfiguration.class);
+        Unmarshaller unmarshaller = ctx.createUnmarshaller();
+        AsterixConfiguration asterixConfiguration = (AsterixConfiguration) unmarshaller.unmarshal(file);
+        return asterixConfiguration;
     }
 
     public static void unzip(String sourceFile, String destDir) throws IOException {
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/events/PatternCreator.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/events/PatternCreator.java
index 0068c08..53dc24b 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/events/PatternCreator.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/events/PatternCreator.java
@@ -174,16 +174,18 @@
         String workingDir = cluster.getWorkingDir().getDir();
         String backupId = "" + instance.getBackupInfo().size();
         String iodevices;
+        String txnLogDir;
         String store;
         String pargs;
         List<Pattern> patternList = new ArrayList<Pattern>();
         for (Node node : cluster.getNode()) {
             Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
             iodevices = node.getIodevices() == null ? instance.getCluster().getIodevices() : node.getIodevices();
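+            // the resolved txn log dir is handed to the backup script as one of its positional arguments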
+            txnLogDir = node.getTxnLogDir() == null ? instance.getCluster().getTxnLogDir() : node.getTxnLogDir();
             store = node.getStore() == null ? cluster.getStore() : node.getStore();
             pargs = workingDir + " " + instance.getName() + " " + iodevices + " " + store + " "
-                    + BackupCommand.ASTERIX_ROOT_METADATA_DIR + " " + InstallerUtil.TXN_LOG_DIR + " " + backupId + " "
-                    + backupDir + " " + "local" + " " + node.getId();
+                    + BackupCommand.ASTERIX_ROOT_METADATA_DIR + " " + txnLogDir + " " + backupId + " " + backupDir
+                    + " " + "local" + " " + node.getId();
             Event event = new Event("backup", nodeid, pargs);
             patternList.add(new Pattern(null, 1, null, event));
         }
@@ -487,7 +489,7 @@
     private Patterns createRemoveAsterixLogDirPattern(AsterixInstance instance) throws Exception {
         List<Pattern> patternList = new ArrayList<Pattern>();
         Cluster cluster = instance.getCluster();
-        String pargs = instance.getCluster().getLogdir();
+        String pargs = instance.getCluster().getLogDir();
         Nodeid nodeid = new Nodeid(new Value(null, cluster.getMasterNode().getId()));
         Event event = new Event("file_delete", nodeid, pargs);
         patternList.add(new Pattern(null, 1, null, event));
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/AsterixInstance.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/AsterixInstance.java
index 9a2d43d..1eba89f 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/AsterixInstance.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/AsterixInstance.java
@@ -18,8 +18,9 @@
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
-import java.util.Properties;
 
+import edu.uci.ics.asterix.common.configuration.AsterixConfiguration;
+import edu.uci.ics.asterix.common.configuration.Property;
 import edu.uci.ics.asterix.event.schema.cluster.Cluster;
 import edu.uci.ics.asterix.event.schema.cluster.Node;
 
@@ -27,6 +28,9 @@
 
     private static final long serialVersionUID = 2874439550187520449L;
 
+    public static final String CC_JAVA_OPTS = "cc_java_opts";
+    public static final String NC_JAVA_OPTS = "nc_java_opts";
+
     public enum State {
         ACTIVE,
         INACTIVE,
@@ -38,7 +42,7 @@
     private final Date createdTimestamp;
     private Date stateChangeTimestamp;
     private Date modifiedTimestamp;
-    private Properties configuration;
+    private AsterixConfiguration asterixConfiguration;
     private State state;
     private final String metadataNodeId;
     private final String asterixVersion;
@@ -47,11 +51,11 @@
     private AsterixRuntimeState runtimeState;
     private State previousState;
 
-    public AsterixInstance(String name, Cluster cluster, Properties configuration, String metadataNodeId,
-            String asterixVersion) {
+    public AsterixInstance(String name, Cluster cluster, AsterixConfiguration asterixConfiguration,
+            String metadataNodeId, String asterixVersion) {
         this.name = name;
         this.cluster = cluster;
-        this.configuration = configuration;
+        this.asterixConfiguration = asterixConfiguration;
         this.metadataNodeId = metadataNodeId;
         this.state = State.ACTIVE;
         this.previousState = State.UNUSABLE;
@@ -65,14 +69,6 @@
         return stateChangeTimestamp;
     }
 
-    public Properties getConfiguration() {
-        return configuration;
-    }
-
-    public void setConfiguration(Properties properties) {
-        this.configuration = properties;
-    }
-
     public State getState() {
         return state;
     }
@@ -174,9 +170,23 @@
             buffer.append(pInfo + "\n");
         }
 
+        buffer.append("\n");
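+        // append the instance's asterix configuration properties to the describe output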
+        buffer.append("Asterix Configuration\n");
+        for (Property property : asterixConfiguration.getProperty()) {
+            buffer.append(property.getName() + ":" + property.getValue() + "\n");
+        }
+
     }
 
     public State getPreviousState() {
         return previousState;
     }
+
+    public AsterixConfiguration getAsterixConfiguration() {
+        return asterixConfiguration;
+    }
+
+    public void setAsterixConfiguration(AsterixConfiguration asterixConfiguration) {
+        this.asterixConfiguration = asterixConfiguration;
+    }
 }
diff --git a/asterix-installer/src/main/resources/clusters/local/conf/asterix.conf b/asterix-installer/src/main/resources/clusters/local/conf/asterix.conf
deleted file mode 100644
index 659b48e..0000000
--- a/asterix-installer/src/main/resources/clusters/local/conf/asterix.conf
+++ /dev/null
@@ -1 +0,0 @@
-output_dir=/tmp/asterix_output/
diff --git a/asterix-installer/src/main/resources/clusters/local/local.xml b/asterix-installer/src/main/resources/clusters/local/local.xml
index 1cbcdfa..0af8dfc 100644
--- a/asterix-installer/src/main/resources/clusters/local/local.xml
+++ b/asterix-installer/src/main/resources/clusters/local/local.xml
@@ -1,23 +1,21 @@
 <cluster xmlns="cluster">
   <name>local</name>
-  <workingDir>
+  <working_dir>
     <dir>/tmp/asterix-installer</dir>
     <NFS>true</NFS>
-  </workingDir>
-  <logdir>/tmp/asterix/logs</logdir>
+  </working_dir>
+  <log_dir>/tmp/asterix/logs</log_dir>
+  <txn_log_dir>/tmp/asterix/logs</txn_log_dir>
   <iodevices>/tmp</iodevices>
   <store>asterix/storage</store>
   <java_home></java_home>
-  <java_opts>-Xmx1024m</java_opts>
-  <master-node>
+  <master_node>
     <id>master</id>
-    <client-ip>127.0.0.1</client-ip>
-    <cluster-ip>127.0.0.1</cluster-ip>
-    <java_opts>-Xmx1024m -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=8800</java_opts>
-  </master-node>
+    <client_ip>127.0.0.1</client_ip>
+    <cluster_ip>127.0.0.1</cluster_ip>
+  </master_node>
   <node>
     <id>node1</id>
-    <cluster-ip>127.0.0.1</cluster-ip>
-    <java_opts>-Xmx1024m -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=8701</java_opts>
+    <cluster_ip>127.0.0.1</cluster_ip>
   </node>
 </cluster>
diff --git a/asterix-installer/src/main/resources/conf/asterix-configuration.xml b/asterix-installer/src/main/resources/conf/asterix-configuration.xml
new file mode 100644
index 0000000..b6958c5
--- /dev/null
+++ b/asterix-installer/src/main/resources/conf/asterix-configuration.xml
@@ -0,0 +1,190 @@
+<asterixConfiguration xmlns="asterixconf">
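+	<!-- Default configuration used by managix when no custom file is supplied via the -a option -->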
+
+	<property>
+		<name>nc.java.opts</name>
+		<value>-Xmx1024m</value>
+		<description>JVM parameters for each Node Controller (NC)</description>
+	</property>
+
+	<property>
+		<name>cc.java.opts</name>
+		<value>-Xmx1024m</value>
+		<description>JVM parameters for each Cluster Controller (CC)
+		</description>
+	</property>
+
+	<property>
+		<name>storage.buffercache.pagesize</name>
+		<value>32768</value>
+		<description>The page size in bytes for pages in the buffer cache.
+			(Default = "32768" // 32KB)
+		</description>
+	</property>
+
+	<property>
+		<name>storage.buffercache.numpages</name>
+		<value>1024</value>
+		<description>The number of pages allocated to the disk buffer cache.
+			(Default = "1024")
+		</description>
+	</property>
+
+	<property>
+		<name>storage.buffercache.maxopenfiles</name>
+		<value>214748364</value>
+		<description>The maximum number of open files in the buffer cache.
+			(Default = "214748364")
+		</description>
+	</property>
+
+	<property>
+		<name>storage.memorycomponent.pagesize</name>
+		<value>32768</value>
+		<description>The page size in bytes for pages allocated to memory
+			components. (Default = "32768" // 32KB)
+		</description>
+	</property>
+
+	<property>
+		<name>storage.memorycomponent.numpages</name>
+		<value>4096</value>
+		<description>The number of pages to allocate for a memory component.
+			(Default = 4096)
+		</description>
+	</property>
+
+	<property>
+		<name>storage.memorycomponent.globalbudget</name>
+		<value>1073741824</value>
+		<description>The total size of memory in bytes that the sum of all
+			open memory
+			components cannot exceed. (Default = "1073741824" // 1GB)
+		</description>
+	</property>
+
+	<property>
+		<name>storage.lsm.mergethreshold</name>
+		<value>3</value>
+		<description>The number of on-disk components an LSM index can have
+			before a merge is triggered. (Default = 3)
+		</description>
+	</property>
+
+	<property>
+		<name>storage.lsm.bloomfilter.falsepositiverate</name>
+		<value>0.01</value>
+		<description>The maximum acceptable false positive rate for bloom
+			filters associated with LSM indexes. (Default = "0.01" // 1%)
+		</description>
+	</property>
+
+	<property>
+		<name>txn.log.buffer.numpages</name>
+		<value>8</value>
+		<description>The number of in-memory log buffer pages. (Default = "8")
+		</description>
+	</property>
+
+	<property>
+		<name>txn.log.buffer.pagesize</name>
+		<value>131072</value>
+		<description>The size of pages in the in-memory log buffer. (Default =
+			"131072" // 128KB)
+		</description>
+	</property>
+
+	<property>
+		<name>txn.log.partitionsize</name>
+		<value>2147483648</value>
+		<description>The maximum size of a log file partition allowed before
+			rotating the log to the next partition. (Default = "2147483648" //
+			2GB)
+		</description>
+	</property>
+
+	<property>
+		<name>txn.log.groupcommitinterval</name>
+		<value>200</value>
+		<description>The group commit wait time in milliseconds. (Default =
+			"200" // 200ms)
+		</description>
+	</property>
+
+	<property>
+		<name>txn.log.checkpoint.lsnthreshold</name>
+		<value>67108864</value>
+		<description>The size of the window by which the maximum LSN is allowed
+			to be ahead of the checkpoint LSN. (Default = "67108864" // 64MB)
+		</description>
+	</property>
+
+	<property>
+		<name>txn.log.checkpoint.pollfrequency</name>
+		<value>120</value>
+		<description>The time in seconds that the checkpoint thread waits
+			between polls. (Default = "120" // 120s)
+		</description>
+	</property>
+
+	<property>
+		<name>txn.lock.escalationthreshold</name>
+		<value>1000</value>
+		<description>The number of entity level locks that need to be acquired
+			before the locks are coalesced and escalated into a dataset level
+			lock. (Default = "1000")
+		</description>
+	</property>
+
+	<property>
+		<name>txn.lock.shrinktimer</name>
+		<value>120000</value>
+		<description>The time in milliseconds to wait before deallocating
+			unused lock manager memory. (Default = "120000" // 120s)
+		</description>
+	</property>
+
+	<property>
+		<name>compiler.sortmemory</name>
+		<value>536870912</value>
+		<description>The amount of memory in bytes given to sort operations.
+			(Default = "536870912" // 512MB)
+		</description>
+	</property>
+
+	<property>
+		<name>compiler.joinmemory</name>
+		<value>536870912</value>
+		<description>The amount of memory in bytes given to join operations.
+			(Default = "536870912" // 512MB)
+		</description>
+	</property>
+
+	<property>
+		<name>compiler.framesize</name>
+		<value>32768</value>
+		<description>The Hyracks frame size that the compiler configures per
+			job. (Default = "32768" // 32KB)
+		</description>
+	</property>
+
+	<property>
+		<name>web.port</name>
+		<value>19001</value>
+		<description>The port for the ASTERIX web interface. (Default = 19001)
+		</description>
+	</property>
+
+	<property>
+		<name>api.port</name>
+		<value>19101</value>
+		<description>The port for the ASTERIX API server. (Default = 19101)
+		</description>
+	</property>
+
+	<property>
+		<name>log.level</name>
+		<value>INFO</value>
+		<description>The minimum log level to be displayed. (Default = INFO)
+		</description>
+	</property>
+</asterixConfiguration>
diff --git a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixInstallerIntegrationUtil.java b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixInstallerIntegrationUtil.java
index 0a058ff..2b86489 100644
--- a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixInstallerIntegrationUtil.java
+++ b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixInstallerIntegrationUtil.java
@@ -29,6 +29,7 @@
 import javax.xml.bind.Unmarshaller;
 
 import edu.uci.ics.asterix.installer.command.CommandHandler;
+import edu.uci.ics.asterix.installer.command.ShutdownCommand;
 import edu.uci.ics.asterix.installer.driver.InstallerDriver;
 import edu.uci.ics.asterix.installer.error.VerificationUtil;
 import edu.uci.ics.asterix.installer.model.AsterixInstance;
@@ -47,6 +48,9 @@
     public static final String ASTERIX_INSTANCE_NAME = "asterix";
     private static final String CC_IP_ADDRESS = "127.0.0.1";
     private static final int DEFAULT_HYRACKS_CC_CLIENT_PORT = 1098;
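+    // 2900 is used when shutting down any previously running service; the actual test run binds zookeeper to 3945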
+    private static final int zookeeperClientPort = 2900;
+    private static final int zookeeperTestClientPort = 3945;
+
     private static IHyracksClientConnection hcc;
 
     private static final Logger LOGGER = Logger.getLogger(AsterixInstallerIntegrationUtil.class.getName());
@@ -92,24 +96,23 @@
         return hcc;
     }
 
-    private static void startZookeeper() throws IOException, JAXBException, InterruptedException {
-        initZookeeperTestConfiguration();
+    private static void startZookeeper() throws Exception {
+        initZookeeperTestConfiguration(zookeeperClientPort);
         String script = managixHome + File.separator + "bin" + File.separator + "managix";
 
         // shutdown zookeeper if running
-        ProcessBuilder pb = new ProcessBuilder(script, "shutdown");
-        Map<String, String> env = pb.environment();
-        env.put("MANAGIX_HOME", managixHome);
-        pb.start();
+        String command = "shutdown";
+        cmdHandler.processCommand(command.split(" "));
+
         Thread.sleep(2000);
 
         // start zookeeper 
+        initZookeeperTestConfiguration(zookeeperTestClientPort);
         ProcessBuilder pb2 = new ProcessBuilder(script, "describe");
         Map<String, String> env2 = pb2.environment();
         env2.put("MANAGIX_HOME", managixHome);
         pb2.start();
 
-        Thread.sleep(2000);
     }
 
     public static void createInstance() throws Exception {
@@ -131,12 +134,12 @@
         assert (state.getFailedNCs().isEmpty() && state.isCcRunning());
     }
 
-    private static void initZookeeperTestConfiguration() throws JAXBException, FileNotFoundException {
+    private static void initZookeeperTestConfiguration(int port) throws JAXBException, FileNotFoundException {
         String installerConfPath = InstallerDriver.getManagixHome() + File.separator + InstallerDriver.MANAGIX_CONF_XML;
         JAXBContext ctx = JAXBContext.newInstance(Configuration.class);
         Unmarshaller unmarshaller = ctx.createUnmarshaller();
         Configuration configuration = (Configuration) unmarshaller.unmarshal(new File(installerConfPath));
-        configuration.getZookeeper().setClientPort(new BigInteger("3945"));
+        configuration.getZookeeper().setClientPort(new BigInteger("" + port));
         Marshaller marshaller = ctx.createMarshaller();
         marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
         marshaller.marshal(configuration, new FileOutputStream(installerConfPath));
diff --git a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixLifecycleIT.java b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixLifecycleIT.java
index 4d9a62a..98c6bf0 100644
--- a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixLifecycleIT.java
+++ b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixLifecycleIT.java
@@ -20,7 +20,6 @@
 import edu.uci.ics.asterix.test.aql.TestsUtils;
 import edu.uci.ics.asterix.testframework.context.TestCaseContext;
 
-
 public class AsterixLifecycleIT {
 
     private static final int NUM_NC = 1;
@@ -58,7 +57,7 @@
                     AsterixInstallerIntegrationUtil.ASTERIX_INSTANCE_NAME);
             AsterixRuntimeState state = VerificationUtil.getAsterixRuntimeState(instance);
             assert (state.getFailedNCs().size() == NUM_NC && !state.isCcRunning());
-            System.out.println("Test stop active instance PASSED");
+            LOGGER.info("Test stop active instance PASSED");
         } catch (Exception e) {
             throw new Exception("Test configure installer " + "\" FAILED!", e);
         }
@@ -74,7 +73,7 @@
                     AsterixInstallerIntegrationUtil.ASTERIX_INSTANCE_NAME);
             AsterixRuntimeState state = VerificationUtil.getAsterixRuntimeState(instance);
             assert (state.getFailedNCs().size() == 0 && state.isCcRunning());
-            System.out.println("Test start active instance PASSED");
+            LOGGER.info("Test start active instance PASSED");
         } catch (Exception e) {
             throw new Exception("Test configure installer " + "\" FAILED!", e);
         }
@@ -89,7 +88,7 @@
             AsterixInstance instance = ServiceProvider.INSTANCE.getLookupService().getAsterixInstance(
                     AsterixInstallerIntegrationUtil.ASTERIX_INSTANCE_NAME);
             assert (instance == null);
-            System.out.println("Test delete active instance PASSED");
+            LOGGER.info("Test delete active instance PASSED");
         } catch (Exception e) {
             throw new Exception("Test delete active instance " + "\" FAILED!", e);
         } finally {
@@ -104,18 +103,17 @@
             TestsUtils.executeTest(PATH_ACTUAL, testCaseCtx);
         }
     }
-    
+
     public static void main(String[] args) throws Exception {
         try {
             setUp();
             new AsterixLifecycleIT().test();
         } catch (Exception e) {
             e.printStackTrace();
-            LOGGER.info("TEST CASES FAILED");
+            LOGGER.info("TEST CASE(S) FAILED");
         } finally {
             tearDown();
         }
     }
 
-
 }
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
index a573ed1..7942159 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
@@ -20,11 +20,11 @@
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
+import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
 import edu.uci.ics.asterix.common.functions.FunctionSignature;
 import edu.uci.ics.asterix.metadata.api.IAsterixStateProxy;
 import edu.uci.ics.asterix.metadata.api.IMetadataManager;
 import edu.uci.ics.asterix.metadata.api.IMetadataNode;
-import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
 import edu.uci.ics.asterix.metadata.entities.Dataset;
 import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
 import edu.uci.ics.asterix.metadata.entities.Datatype;
@@ -71,615 +71,593 @@
  * with transaction ids of regular jobs or other metadata transactions.
  */
 public class MetadataManager implements IMetadataManager {
-	// Set in init().
-	public static MetadataManager INSTANCE;
-	private final MetadataCache cache = new MetadataCache();
-	private IAsterixStateProxy proxy;
-	private IMetadataNode metadataNode;
-	private final ReadWriteLock metadataLatch;
 
-	public MetadataManager(IAsterixStateProxy proxy) {
-		if (proxy == null) {
-			throw new Error("Null proxy given to MetadataManager.");
-		}
-		this.proxy = proxy;
-		this.metadataNode = null;
-		this.metadataLatch = new ReentrantReadWriteLock(true);
-	}
+    // Set in init().
+    public static MetadataManager INSTANCE;
+    private final MetadataCache cache = new MetadataCache();
+    private IAsterixStateProxy proxy;
+    private IMetadataNode metadataNode;
+    private final ReadWriteLock metadataLatch;
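+    // Metadata configuration (e.g., the metadata node name), now passed in directly
+    // instead of being fetched from the remote state proxy.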
+    private final AsterixMetadataProperties metadataProperties;
 
-	@Override
-	public void init() throws RemoteException {
-		// Could be synchronized on any object. Arbitrarily chose proxy.
-		synchronized (proxy) {
-			if (metadataNode != null) {
-				return;
-			}
-			metadataNode = proxy.getMetadataNode();
-			if (metadataNode == null) {
-				throw new Error("Failed to get the MetadataNode.\n"
-						+ "The MetadataNode was configured to run on NC: "
-						+ proxy.getAsterixProperties().getMetadataNodeName());
-			}
-		}
-	}
+    public MetadataManager(IAsterixStateProxy proxy, AsterixMetadataProperties metadataProperties) {
+        if (proxy == null) {
+            throw new Error("Null proxy given to MetadataManager.");
+        }
+        this.proxy = proxy;
+        this.metadataProperties = metadataProperties;
+        this.metadataNode = null;
+        this.metadataLatch = new ReentrantReadWriteLock(true);
+    }
 
-	@Override
-	public MetadataTransactionContext beginTransaction()
-			throws RemoteException, ACIDException {
-		JobId jobId = JobIdFactory.generateJobId();
-		metadataNode.beginTransaction(jobId);
-		return new MetadataTransactionContext(jobId);
-	}
+    @Override
+    public void init() throws RemoteException {
+        // Could be synchronized on any object. Arbitrarily chose proxy.
+        synchronized (proxy) {
+            if (metadataNode != null) {
+                return;
+            }
+            metadataNode = proxy.getMetadataNode();
+            if (metadataNode == null) {
+                throw new Error("Failed to get the MetadataNode.\n" + "The MetadataNode was configured to run on NC: "
+                        + metadataProperties.getMetadataNodeName());
+            }
+        }
+    }
 
-	@Override
-	public void commitTransaction(MetadataTransactionContext ctx)
-			throws RemoteException, ACIDException {
-		metadataNode.commitTransaction(ctx.getJobId());
-		cache.commit(ctx);
-	}
+    @Override
+    public MetadataTransactionContext beginTransaction() throws RemoteException, ACIDException {
+        JobId jobId = JobIdFactory.generateJobId();
+        metadataNode.beginTransaction(jobId);
+        return new MetadataTransactionContext(jobId);
+    }
 
-	@Override
-	public void abortTransaction(MetadataTransactionContext ctx)
-			throws RemoteException, ACIDException {
-		metadataNode.abortTransaction(ctx.getJobId());
-	}
+    @Override
+    public void commitTransaction(MetadataTransactionContext ctx) throws RemoteException, ACIDException {
+        metadataNode.commitTransaction(ctx.getJobId());
+        cache.commit(ctx);
+    }
 
-	@Override
-	public void lock(MetadataTransactionContext ctx, byte lockMode)
-			throws RemoteException, ACIDException {
-		metadataNode.lock(ctx.getJobId(), lockMode);
-	}
+    @Override
+    public void abortTransaction(MetadataTransactionContext ctx) throws RemoteException, ACIDException {
+        metadataNode.abortTransaction(ctx.getJobId());
+    }
 
-	@Override
-	public void unlock(MetadataTransactionContext ctx) throws RemoteException,
-			ACIDException {
-		metadataNode.unlock(ctx.getJobId());
-	}
+    @Override
+    public void lock(MetadataTransactionContext ctx, byte lockMode) throws RemoteException, ACIDException {
+        metadataNode.lock(ctx.getJobId(), lockMode);
+    }
 
-	@Override
-	public void addDataverse(MetadataTransactionContext ctx, Dataverse dataverse)
-			throws MetadataException {
-		try {
-			metadataNode.addDataverse(ctx.getJobId(), dataverse);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		ctx.addDataverse(dataverse);
-	}
+    @Override
+    public void unlock(MetadataTransactionContext ctx) throws RemoteException, ACIDException {
+        metadataNode.unlock(ctx.getJobId());
+    }
 
-	@Override
-	public void dropDataverse(MetadataTransactionContext ctx,
-			String dataverseName) throws MetadataException {
-		try {
-			metadataNode.dropDataverse(ctx.getJobId(), dataverseName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		ctx.dropDataverse(dataverseName);
-	}
+    @Override
+    public void addDataverse(MetadataTransactionContext ctx, Dataverse dataverse) throws MetadataException {
+        try {
+            metadataNode.addDataverse(ctx.getJobId(), dataverse);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        ctx.addDataverse(dataverse);
+    }
 
-	@Override
-	public List<Dataverse> getDataverses(MetadataTransactionContext ctx)
-			throws MetadataException {
-		try {
-			return metadataNode.getDataverses(ctx.getJobId());
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-	}
+    @Override
+    public void dropDataverse(MetadataTransactionContext ctx, String dataverseName) throws MetadataException {
+        try {
+            metadataNode.dropDataverse(ctx.getJobId(), dataverseName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        ctx.dropDataverse(dataverseName);
+    }
 
-	@Override
-	public Dataverse getDataverse(MetadataTransactionContext ctx,
-			String dataverseName) throws MetadataException {
-		// First look in the context to see if this transaction created the
-		// requested dataverse itself (but the dataverse is still uncommitted).
-		Dataverse dataverse = ctx.getDataverse(dataverseName);
-		if (dataverse != null) {
-			// Don't add this dataverse to the cache, since it is still
-			// uncommitted.
-			return dataverse;
-		}
-		if (ctx.dataverseIsDropped(dataverseName)) {
-			// Dataverse has been dropped by this transaction but could still be
-			// in the cache.
-			return null;
-		}
-		dataverse = cache.getDataverse(dataverseName);
-		if (dataverse != null) {
-			// Dataverse is already in the cache, don't add it again.
-			return dataverse;
-		}
-		try {
-			dataverse = metadataNode
-					.getDataverse(ctx.getJobId(), dataverseName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		// We fetched the dataverse from the MetadataNode. Add it to the cache
-		// when this transaction commits.
-		if (dataverse != null) {
-			ctx.addDataverse(dataverse);
-		}
-		return dataverse;
-	}
+    @Override
+    public List<Dataverse> getDataverses(MetadataTransactionContext ctx) throws MetadataException {
+        try {
+            return metadataNode.getDataverses(ctx.getJobId());
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+    }
 
-	@Override
-	public List<Dataset> getDataverseDatasets(MetadataTransactionContext ctx,
-			String dataverseName) throws MetadataException {
-		List<Dataset> dataverseDatasets;
-		try {
-			// Assuming that the transaction can read its own writes on the
-			// metadata node.
-			dataverseDatasets = metadataNode.getDataverseDatasets(
-					ctx.getJobId(), dataverseName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		// Don't update the cache to avoid checking against the transaction's
-		// uncommitted datasets.
-		return dataverseDatasets;
-	}
+    @Override
+    public Dataverse getDataverse(MetadataTransactionContext ctx, String dataverseName) throws MetadataException {
+        // First look in the context to see if this transaction created the
+        // requested dataverse itself (but the dataverse is still uncommitted).
+        Dataverse dataverse = ctx.getDataverse(dataverseName);
+        if (dataverse != null) {
+            // Don't add this dataverse to the cache, since it is still
+            // uncommitted.
+            return dataverse;
+        }
+        if (ctx.dataverseIsDropped(dataverseName)) {
+            // Dataverse has been dropped by this transaction but could still be
+            // in the cache.
+            return null;
+        }
+        dataverse = cache.getDataverse(dataverseName);
+        if (dataverse != null) {
+            // Dataverse is already in the cache, don't add it again.
+            return dataverse;
+        }
+        try {
+            dataverse = metadataNode.getDataverse(ctx.getJobId(), dataverseName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        // We fetched the dataverse from the MetadataNode. Add it to the cache
+        // when this transaction commits.
+        if (dataverse != null) {
+            ctx.addDataverse(dataverse);
+        }
+        return dataverse;
+    }
 
-	@Override
-	public void addDataset(MetadataTransactionContext ctx, Dataset dataset)
-			throws MetadataException {
-		// add dataset into metadataNode
-		try {
-			metadataNode.addDataset(ctx.getJobId(), dataset);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
+    @Override
+    public List<Dataset> getDataverseDatasets(MetadataTransactionContext ctx, String dataverseName)
+            throws MetadataException {
+        List<Dataset> dataverseDatasets;
+        try {
+            // Assuming that the transaction can read its own writes on the
+            // metadata node.
+            dataverseDatasets = metadataNode.getDataverseDatasets(ctx.getJobId(), dataverseName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        // Don't update the cache to avoid checking against the transaction's
+        // uncommitted datasets.
+        return dataverseDatasets;
+    }
 
-		// reflect the dataset into the cache
-		ctx.addDataset(dataset);
-	}
+    @Override
+    public void addDataset(MetadataTransactionContext ctx, Dataset dataset) throws MetadataException {
+        // add the dataset to the metadata node
+        try {
+            metadataNode.addDataset(ctx.getJobId(), dataset);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
 
-	@Override
-	public void dropDataset(MetadataTransactionContext ctx,
-			String dataverseName, String datasetName) throws MetadataException {
-		try {
-			metadataNode
-					.dropDataset(ctx.getJobId(), dataverseName, datasetName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		ctx.dropDataset(dataverseName, datasetName);
-	}
+        // reflect the dataset into the cache
+        ctx.addDataset(dataset);
+    }
 
-	@Override
-	public Dataset getDataset(MetadataTransactionContext ctx,
-			String dataverseName, String datasetName) throws MetadataException {
+    @Override
+    public void dropDataset(MetadataTransactionContext ctx, String dataverseName, String datasetName)
+            throws MetadataException {
+        try {
+            metadataNode.dropDataset(ctx.getJobId(), dataverseName, datasetName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        ctx.dropDataset(dataverseName, datasetName);
+    }
 
-		// First look in the context to see if this transaction created the
-		// requested dataset itself (but the dataset is still uncommitted).
-		Dataset dataset = ctx.getDataset(dataverseName, datasetName);
-		if (dataset != null) {
-			// Don't add this dataverse to the cache, since it is still
-			// uncommitted.
-			return dataset;
-		}
-		if (ctx.datasetIsDropped(dataverseName, datasetName)) {
-			// Dataset has been dropped by this transaction but could still be
-			// in the cache.
-			return null;
-		}
+    @Override
+    public Dataset getDataset(MetadataTransactionContext ctx, String dataverseName, String datasetName)
+            throws MetadataException {
 
-		dataset = cache.getDataset(dataverseName, datasetName);
-		if (dataset != null) {
-			// Dataset is already in the cache, don't add it again.
-			return dataset;
-		}
-		try {
-			dataset = metadataNode.getDataset(ctx.getJobId(), dataverseName,
-					datasetName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		// We fetched the dataset from the MetadataNode. Add it to the cache
-		// when this transaction commits.
-		if (dataset != null) {
-			ctx.addDataset(dataset);
-		}
-		return dataset;
-	}
+        // First look in the context to see if this transaction created the
+        // requested dataset itself (but the dataset is still uncommitted).
+        Dataset dataset = ctx.getDataset(dataverseName, datasetName);
+        if (dataset != null) {
+            // Don't add this dataset to the cache, since it is still
+            // uncommitted.
+            return dataset;
+        }
+        if (ctx.datasetIsDropped(dataverseName, datasetName)) {
+            // Dataset has been dropped by this transaction but could still be
+            // in the cache.
+            return null;
+        }
 
-	@Override
-	public List<Index> getDatasetIndexes(MetadataTransactionContext ctx,
-			String dataverseName, String datasetName) throws MetadataException {
-		List<Index> datsetIndexes;
-		try {
-			datsetIndexes = metadataNode.getDatasetIndexes(ctx.getJobId(),
-					dataverseName, datasetName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		return datsetIndexes;
-	}
+        dataset = cache.getDataset(dataverseName, datasetName);
+        if (dataset != null) {
+            // Dataset is already in the cache, don't add it again.
+            return dataset;
+        }
+        try {
+            dataset = metadataNode.getDataset(ctx.getJobId(), dataverseName, datasetName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        // We fetched the dataset from the MetadataNode. Add it to the cache
+        // when this transaction commits.
+        if (dataset != null) {
+            ctx.addDataset(dataset);
+        }
+        return dataset;
+    }
 
-	@Override
-	public void addDatatype(MetadataTransactionContext ctx, Datatype datatype)
-			throws MetadataException {
-		try {
-			metadataNode.addDatatype(ctx.getJobId(), datatype);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		ctx.addDatatype(datatype);
-	}
+    @Override
+    public List<Index> getDatasetIndexes(MetadataTransactionContext ctx, String dataverseName, String datasetName)
+            throws MetadataException {
+        List<Index> datasetIndexes;
+        try {
+            datasetIndexes = metadataNode.getDatasetIndexes(ctx.getJobId(), dataverseName, datasetName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        return datasetIndexes;
+    }
 
-	@Override
-	public void dropDatatype(MetadataTransactionContext ctx,
-			String dataverseName, String datatypeName) throws MetadataException {
-		try {
-			metadataNode.dropDatatype(ctx.getJobId(), dataverseName,
-					datatypeName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		ctx.dropDataDatatype(dataverseName, datatypeName);
-	}
+    @Override
+    public void addDatatype(MetadataTransactionContext ctx, Datatype datatype) throws MetadataException {
+        try {
+            metadataNode.addDatatype(ctx.getJobId(), datatype);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        ctx.addDatatype(datatype);
+    }
 
-	@Override
-	public Datatype getDatatype(MetadataTransactionContext ctx,
-			String dataverseName, String datatypeName) throws MetadataException {
-		// First look in the context to see if this transaction created the
-		// requested datatype itself (but the datatype is still uncommitted).
-		Datatype datatype = ctx.getDatatype(dataverseName, datatypeName);
-		if (datatype != null) {
-			// Don't add this dataverse to the cache, since it is still
-			// uncommitted.
-			return datatype;
-		}
-		if (ctx.datatypeIsDropped(dataverseName, datatypeName)) {
-			// Datatype has been dropped by this transaction but could still be
-			// in the cache.
-			return null;
-		}
+    @Override
+    public void dropDatatype(MetadataTransactionContext ctx, String dataverseName, String datatypeName)
+            throws MetadataException {
+        try {
+            metadataNode.dropDatatype(ctx.getJobId(), dataverseName, datatypeName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        ctx.dropDataDatatype(dataverseName, datatypeName);
+    }
 
-		datatype = cache.getDatatype(dataverseName, datatypeName);
-		if (datatype != null) {
-			// Datatype is already in the cache, don't add it again.
-			return datatype;
-		}
-		try {
-			datatype = metadataNode.getDatatype(ctx.getJobId(), dataverseName,
-					datatypeName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		// We fetched the datatype from the MetadataNode. Add it to the cache
-		// when this transaction commits.
-		if (datatype != null) {
-			ctx.addDatatype(datatype);
-		}
-		return datatype;
-	}
+    @Override
+    public Datatype getDatatype(MetadataTransactionContext ctx, String dataverseName, String datatypeName)
+            throws MetadataException {
+        // First look in the context to see if this transaction created the
+        // requested datatype itself (but the datatype is still uncommitted).
+        Datatype datatype = ctx.getDatatype(dataverseName, datatypeName);
+        if (datatype != null) {
+            // Don't add this datatype to the cache, since it is still
+            // uncommitted.
+            return datatype;
+        }
+        if (ctx.datatypeIsDropped(dataverseName, datatypeName)) {
+            // Datatype has been dropped by this transaction but could still be
+            // in the cache.
+            return null;
+        }
 
-	@Override
-	public void addIndex(MetadataTransactionContext ctx, Index index)
-			throws MetadataException {
-		try {
-			metadataNode.addIndex(ctx.getJobId(), index);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		ctx.addIndex(index);
-	}
+        datatype = cache.getDatatype(dataverseName, datatypeName);
+        if (datatype != null) {
+            // Datatype is already in the cache, don't add it again.
+            return datatype;
+        }
+        try {
+            datatype = metadataNode.getDatatype(ctx.getJobId(), dataverseName, datatypeName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        // We fetched the datatype from the MetadataNode. Add it to the cache
+        // when this transaction commits.
+        if (datatype != null) {
+            ctx.addDatatype(datatype);
+        }
+        return datatype;
+    }
 
-	@Override
-	public void addAdapter(MetadataTransactionContext mdTxnCtx,
-			DatasourceAdapter adapter) throws MetadataException {
-		try {
-			metadataNode.addAdapter(mdTxnCtx.getJobId(), adapter);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		mdTxnCtx.addAdapter(adapter);
+    @Override
+    public void addIndex(MetadataTransactionContext ctx, Index index) throws MetadataException {
+        try {
+            metadataNode.addIndex(ctx.getJobId(), index);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        ctx.addIndex(index);
+    }
 
-	}
+    @Override
+    public void addAdapter(MetadataTransactionContext mdTxnCtx, DatasourceAdapter adapter) throws MetadataException {
+        try {
+            metadataNode.addAdapter(mdTxnCtx.getJobId(), adapter);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        mdTxnCtx.addAdapter(adapter);
 
-	@Override
-	public void dropIndex(MetadataTransactionContext ctx, String dataverseName,
-			String datasetName, String indexName) throws MetadataException {
-		try {
-			metadataNode.dropIndex(ctx.getJobId(), dataverseName, datasetName,
-					indexName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		ctx.dropIndex(dataverseName, datasetName, indexName);
-	}
+    }
 
-	@Override
-	public Index getIndex(MetadataTransactionContext ctx, String dataverseName,
-			String datasetName, String indexName) throws MetadataException {
+    @Override
+    public void dropIndex(MetadataTransactionContext ctx, String dataverseName, String datasetName, String indexName)
+            throws MetadataException {
+        try {
+            metadataNode.dropIndex(ctx.getJobId(), dataverseName, datasetName, indexName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        ctx.dropIndex(dataverseName, datasetName, indexName);
+    }
 
-		// First look in the context to see if this transaction created the
-		// requested index itself (but the index is still uncommitted).
-		Index index = ctx.getIndex(dataverseName, datasetName, indexName);
-		if (index != null) {
-			// Don't add this index to the cache, since it is still
-			// uncommitted.
-			return index;
-		}
+    @Override
+    public Index getIndex(MetadataTransactionContext ctx, String dataverseName, String datasetName, String indexName)
+            throws MetadataException {
 
-		if (ctx.indexIsDropped(dataverseName, datasetName, indexName)) {
-			// Index has been dropped by this transaction but could still be
-			// in the cache.
-			return null;
-		}
+        // First look in the context to see if this transaction created the
+        // requested index itself (but the index is still uncommitted).
+        Index index = ctx.getIndex(dataverseName, datasetName, indexName);
+        if (index != null) {
+            // Don't add this index to the cache, since it is still
+            // uncommitted.
+            return index;
+        }
 
-		index = cache.getIndex(dataverseName, datasetName, indexName);
-		if (index != null) {
-			// Index is already in the cache, don't add it again.
-			return index;
-		}
-		try {
-			index = metadataNode.getIndex(ctx.getJobId(), dataverseName,
-					datasetName, indexName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		// We fetched the index from the MetadataNode. Add it to the cache
-		// when this transaction commits.
-		if (index != null) {
-			ctx.addIndex(index);
-		}
-		return index;
-	}
+        if (ctx.indexIsDropped(dataverseName, datasetName, indexName)) {
+            // Index has been dropped by this transaction but could still be
+            // in the cache.
+            return null;
+        }
 
-	@Override
-	public void addNode(MetadataTransactionContext ctx, Node node)
-			throws MetadataException {
-		try {
-			metadataNode.addNode(ctx.getJobId(), node);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-	}
+        index = cache.getIndex(dataverseName, datasetName, indexName);
+        if (index != null) {
+            // Index is already in the cache, don't add it again.
+            return index;
+        }
+        try {
+            index = metadataNode.getIndex(ctx.getJobId(), dataverseName, datasetName, indexName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        // We fetched the index from the MetadataNode. Add it to the cache
+        // when this transaction commits.
+        if (index != null) {
+            ctx.addIndex(index);
+        }
+        return index;
+    }
 
-	@Override
-	public void addNodegroup(MetadataTransactionContext ctx, NodeGroup nodeGroup)
-			throws MetadataException {
-		try {
-			metadataNode.addNodeGroup(ctx.getJobId(), nodeGroup);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		ctx.addNogeGroup(nodeGroup);
-	}
+    @Override
+    public void addNode(MetadataTransactionContext ctx, Node node) throws MetadataException {
+        try {
+            metadataNode.addNode(ctx.getJobId(), node);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+    }
 
-	@Override
-	public void dropNodegroup(MetadataTransactionContext ctx,
-			String nodeGroupName) throws MetadataException {
-		try {
-			metadataNode.dropNodegroup(ctx.getJobId(), nodeGroupName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		ctx.dropNodeGroup(nodeGroupName);
-	}
+    @Override
+    public void addNodegroup(MetadataTransactionContext ctx, NodeGroup nodeGroup) throws MetadataException {
+        try {
+            metadataNode.addNodeGroup(ctx.getJobId(), nodeGroup);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        ctx.addNogeGroup(nodeGroup);
+    }
 
-	@Override
-	public NodeGroup getNodegroup(MetadataTransactionContext ctx,
-			String nodeGroupName) throws MetadataException {
-		// First look in the context to see if this transaction created the
-		// requested dataverse itself (but the dataverse is still uncommitted).
-		NodeGroup nodeGroup = ctx.getNodeGroup(nodeGroupName);
-		if (nodeGroup != null) {
-			// Don't add this dataverse to the cache, since it is still
-			// uncommitted.
-			return nodeGroup;
-		}
-		if (ctx.nodeGroupIsDropped(nodeGroupName)) {
-			// NodeGroup has been dropped by this transaction but could still be
-			// in the cache.
-			return null;
-		}
-		nodeGroup = cache.getNodeGroup(nodeGroupName);
-		if (nodeGroup != null) {
-			// NodeGroup is already in the cache, don't add it again.
-			return nodeGroup;
-		}
-		try {
-			nodeGroup = metadataNode
-					.getNodeGroup(ctx.getJobId(), nodeGroupName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		// We fetched the nodeGroup from the MetadataNode. Add it to the cache
-		// when this transaction commits.
-		if (nodeGroup != null) {
-			ctx.addNogeGroup(nodeGroup);
-		}
-		return nodeGroup;
-	}
+    @Override
+    public void dropNodegroup(MetadataTransactionContext ctx, String nodeGroupName) throws MetadataException {
+        try {
+            metadataNode.dropNodegroup(ctx.getJobId(), nodeGroupName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        ctx.dropNodeGroup(nodeGroupName);
+    }
 
-	@Override
-	public void addFunction(MetadataTransactionContext mdTxnCtx,
-			Function function) throws MetadataException {
-		try {
-			metadataNode.addFunction(mdTxnCtx.getJobId(), function);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		mdTxnCtx.addFunction(function);
-	}
+    @Override
+    public NodeGroup getNodegroup(MetadataTransactionContext ctx, String nodeGroupName) throws MetadataException {
+        // First look in the context to see if this transaction created the
+        // requested nodegroup itself (but the nodegroup is still uncommitted).
+        NodeGroup nodeGroup = ctx.getNodeGroup(nodeGroupName);
+        if (nodeGroup != null) {
+            // Don't add this nodegroup to the cache, since it is still
+            // uncommitted.
+            return nodeGroup;
+        }
+        if (ctx.nodeGroupIsDropped(nodeGroupName)) {
+            // NodeGroup has been dropped by this transaction but could still be
+            // in the cache.
+            return null;
+        }
+        nodeGroup = cache.getNodeGroup(nodeGroupName);
+        if (nodeGroup != null) {
+            // NodeGroup is already in the cache, don't add it again.
+            return nodeGroup;
+        }
+        try {
+            nodeGroup = metadataNode.getNodeGroup(ctx.getJobId(), nodeGroupName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        // We fetched the nodeGroup from the MetadataNode. Add it to the cache
+        // when this transaction commits.
+        if (nodeGroup != null) {
+            ctx.addNogeGroup(nodeGroup);
+        }
+        return nodeGroup;
+    }
 
-	@Override
-	public void dropFunction(MetadataTransactionContext ctx,
-			FunctionSignature functionSignature) throws MetadataException {
-		try {
-			metadataNode.dropFunction(ctx.getJobId(), functionSignature);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		ctx.dropFunction(functionSignature);
-	}
+    @Override
+    public void addFunction(MetadataTransactionContext mdTxnCtx, Function function) throws MetadataException {
+        try {
+            metadataNode.addFunction(mdTxnCtx.getJobId(), function);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        mdTxnCtx.addFunction(function);
+    }
 
-	@Override
-	public Function getFunction(MetadataTransactionContext ctx,
-			FunctionSignature functionSignature) throws MetadataException {
-		// First look in the context to see if this transaction created the
-		// requested dataset itself (but the dataset is still uncommitted).
-		Function function = ctx.getFunction(functionSignature);
-		if (function != null) {
-			// Don't add this dataverse to the cache, since it is still
-			// uncommitted.
-			return function;
-		}
-		if (ctx.functionIsDropped(functionSignature)) {
-			// Function has been dropped by this transaction but could still be
-			// in the cache.
-			return null;
-		}
-		if (ctx.getDataverse(functionSignature.getNamespace()) != null) {
-			// This transaction has dropped and subsequently created the same
-			// dataverse.
-			return null;
-		}
-		function = cache.getFunction(functionSignature);
-		if (function != null) {
-			// Function is already in the cache, don't add it again.
-			return function;
-		}
-		try {
-			function = metadataNode.getFunction(ctx.getJobId(),
-					functionSignature);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		// We fetched the function from the MetadataNode. Add it to the cache
-		// when this transaction commits.
-		if (function != null) {
-			ctx.addFunction(function);
-		}
-		return function;
+    @Override
+    public void dropFunction(MetadataTransactionContext ctx, FunctionSignature functionSignature)
+            throws MetadataException {
+        try {
+            metadataNode.dropFunction(ctx.getJobId(), functionSignature);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        ctx.dropFunction(functionSignature);
+    }
 
-	}
+    @Override
+    public Function getFunction(MetadataTransactionContext ctx, FunctionSignature functionSignature)
+            throws MetadataException {
+        // First look in the context to see if this transaction created the
+        // requested function itself (but the function is still uncommitted).
+        Function function = ctx.getFunction(functionSignature);
+        if (function != null) {
+            // Don't add this function to the cache, since it is still
+            // uncommitted.
+            return function;
+        }
+        if (ctx.functionIsDropped(functionSignature)) {
+            // Function has been dropped by this transaction but could still be
+            // in the cache.
+            return null;
+        }
+        if (ctx.getDataverse(functionSignature.getNamespace()) != null) {
+            // This transaction has dropped and subsequently created the same
+            // dataverse.
+            return null;
+        }
+        function = cache.getFunction(functionSignature);
+        if (function != null) {
+            // Function is already in the cache, don't add it again.
+            return function;
+        }
+        try {
+            function = metadataNode.getFunction(ctx.getJobId(), functionSignature);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        // We fetched the function from the MetadataNode. Add it to the cache
+        // when this transaction commits.
+        if (function != null) {
+            ctx.addFunction(function);
+        }
+        return function;
 
-	@Override
-	public void initializeDatasetIdFactory(MetadataTransactionContext ctx)
-			throws MetadataException {
-		try {
-			metadataNode.initializeDatasetIdFactory(ctx.getJobId());
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-	}
+    }
 
-	@Override
-	public List<Function> getDataverseFunctions(MetadataTransactionContext ctx,
-			String dataverseName) throws MetadataException {
-		List<Function> dataverseFunctions;
-		try {
-			// Assuming that the transaction can read its own writes on the
-			// metadata node.
-			dataverseFunctions = metadataNode.getDataverseFunctions(
-					ctx.getJobId(), dataverseName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		// Don't update the cache to avoid checking against the transaction's
-		// uncommitted functions.
-		return dataverseFunctions;
-	}
+    @Override
+    public void initializeDatasetIdFactory(MetadataTransactionContext ctx) throws MetadataException {
+        try {
+            metadataNode.initializeDatasetIdFactory(ctx.getJobId());
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+    }
 
-	@Override
-	public void dropAdapter(MetadataTransactionContext ctx,
-			String dataverseName, String name) throws MetadataException {
-		try {
-			metadataNode.dropAdapter(ctx.getJobId(), dataverseName, name);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-	}
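+    // Delegates to the metadata node to fetch the id of the most recently created dataset.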
+    @Override
+    public int getMostRecentDatasetId() throws MetadataException {
+        try {
+            return metadataNode.getMostRecentDatasetId();
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+    }
 
-	@Override
-	public DatasourceAdapter getAdapter(MetadataTransactionContext ctx,
-			String dataverseName, String name) throws MetadataException {
-		DatasourceAdapter adapter = null;
-		try {
-			adapter = metadataNode.getAdapter(ctx.getJobId(), dataverseName,
-					name);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		return adapter;
-	}
+    @Override
+    public List<Function> getDataverseFunctions(MetadataTransactionContext ctx, String dataverseName)
+            throws MetadataException {
+        List<Function> dataverseFunctions;
+        try {
+            // Assuming that the transaction can read its own writes on the
+            // metadata node.
+            dataverseFunctions = metadataNode.getDataverseFunctions(ctx.getJobId(), dataverseName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        // Don't update the cache to avoid checking against the transaction's
+        // uncommitted functions.
+        return dataverseFunctions;
+    }
 
-	@Override
-	public void dropLibrary(MetadataTransactionContext ctx,
-			String dataverseName, String libraryName) throws MetadataException {
-		try {
-			metadataNode
-					.dropLibrary(ctx.getJobId(), dataverseName, libraryName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		ctx.dropLibrary(dataverseName, libraryName);
-	}
+    @Override
+    public void dropAdapter(MetadataTransactionContext ctx, String dataverseName, String name)
+            throws MetadataException {
+        try {
+            metadataNode.dropAdapter(ctx.getJobId(), dataverseName, name);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+    }
 
-	@Override
-	public List<Library> getDataverseLibraries(MetadataTransactionContext ctx,
-			String dataverseName) throws MetadataException {
-		List<Library> dataverseLibaries = null;
-		try {
-			// Assuming that the transaction can read its own writes on the
-			// metadata node.
-			dataverseLibaries = metadataNode.getDataverseLibraries(
-					ctx.getJobId(), dataverseName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		// Don't update the cache to avoid checking against the transaction's
-		// uncommitted functions.
-		return dataverseLibaries;
-	}
+    @Override
+    public DatasourceAdapter getAdapter(MetadataTransactionContext ctx, String dataverseName, String name)
+            throws MetadataException {
+        DatasourceAdapter adapter = null;
+        try {
+            adapter = metadataNode.getAdapter(ctx.getJobId(), dataverseName, name);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        return adapter;
+    }
 
-	@Override
-	public void addLibrary(MetadataTransactionContext ctx, Library library)
-			throws MetadataException {
-		try {
-			metadataNode.addLibrary(ctx.getJobId(), library);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		ctx.addLibrary(library);
-	}
+    @Override
+    public void dropLibrary(MetadataTransactionContext ctx, String dataverseName, String libraryName)
+            throws MetadataException {
+        try {
+            metadataNode.dropLibrary(ctx.getJobId(), dataverseName, libraryName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        ctx.dropLibrary(dataverseName, libraryName);
+    }
 
-	@Override
-	public Library getLibrary(MetadataTransactionContext ctx,
-			String dataverseName, String libraryName) throws MetadataException,
-			RemoteException {
-		Library library = null;
-		try {
-			library = metadataNode.getLibrary(ctx.getJobId(), dataverseName,
-					libraryName);
-		} catch (RemoteException e) {
-			throw new MetadataException(e);
-		}
-		return library;
-	}
+    @Override
+    public List<Library> getDataverseLibraries(MetadataTransactionContext ctx, String dataverseName)
+            throws MetadataException {
+        List<Library> dataverseLibraries = null;
+        try {
+            // Assuming that the transaction can read its own writes on the
+            // metadata node.
+            dataverseLibraries = metadataNode.getDataverseLibraries(ctx.getJobId(), dataverseName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        // Don't update the cache to avoid checking against the transaction's
+        // uncommitted libraries.
+        return dataverseLibraries;
+    }
 
-	@Override
-	public void acquireWriteLatch() {
-		metadataLatch.writeLock().lock();
-	}
+    @Override
+    public void addLibrary(MetadataTransactionContext ctx, Library library) throws MetadataException {
+        try {
+            metadataNode.addLibrary(ctx.getJobId(), library);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        ctx.addLibrary(library);
+    }
 
-	@Override
-	public void releaseWriteLatch() {
-		metadataLatch.writeLock().unlock();
-	}
+    @Override
+    public Library getLibrary(MetadataTransactionContext ctx, String dataverseName, String libraryName)
+            throws MetadataException, RemoteException {
+        Library library = null;
+        try {
+            library = metadataNode.getLibrary(ctx.getJobId(), dataverseName, libraryName);
+        } catch (RemoteException e) {
+            throw new MetadataException(e);
+        }
+        return library;
+    }
 
-	@Override
-	public void acquireReadLatch() {
-		metadataLatch.readLock().lock();
-	}
 
-	@Override
-	public void releaseReadLatch() {
-		metadataLatch.readLock().unlock();
-	}
+    @Override
+    public void acquireWriteLatch() {
+        metadataLatch.writeLock().lock();
+    }
 
+    @Override
+    public void releaseWriteLatch() {
+        metadataLatch.writeLock().unlock();
+    }
+
+    @Override
+    public void acquireReadLatch() {
+        metadataLatch.readLock().lock();
+    }
+
+    @Override
+    public void releaseReadLatch() {
+        metadataLatch.readLock().unlock();
+    }
 }
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
index 90b72a2..34b2767 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
@@ -1207,4 +1207,7 @@
         }
     }
 
+    public int getMostRecentDatasetId() throws MetadataException, RemoteException {
+        return DatasetIdFactory.getMostRecentDatasetId();
+    }
 }
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IAsterixStateProxy.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IAsterixStateProxy.java
index 5f772c7..7f06c30 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IAsterixStateProxy.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IAsterixStateProxy.java
@@ -19,17 +19,11 @@
 import java.rmi.Remote;
 import java.rmi.RemoteException;
 
-import edu.uci.ics.asterix.metadata.bootstrap.AsterixProperties;
-
 /**
  * Interface for setting/getting distributed state of Asterix.
  */
 public interface IAsterixStateProxy extends Remote, Serializable {
     public void setMetadataNode(IMetadataNode metadataNode) throws RemoteException;
 
-    public void setAsterixProperties(AsterixProperties asterixProperties) throws RemoteException;
-
     public IMetadataNode getMetadataNode() throws RemoteException;
-
-    public AsterixProperties getAsterixProperties() throws RemoteException;
 }
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataManager.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataManager.java
index 46b1f80..f7809df 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataManager.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataManager.java
@@ -8,7 +8,7 @@
  * 
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
@@ -443,6 +443,8 @@
 
     public void initializeDatasetIdFactory(MetadataTransactionContext ctx) throws MetadataException;
 
+    public int getMostRecentDatasetId() throws MetadataException;
+
     public void acquireWriteLatch();
 
     public void releaseWriteLatch();
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataNode.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataNode.java
index a27dae2..6b1dee4 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataNode.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataNode.java
@@ -474,6 +474,8 @@
     public void addAdapter(JobId jobId, DatasourceAdapter adapter) throws MetadataException, RemoteException;
 
     public void initializeDatasetIdFactory(JobId jobId) throws MetadataException, RemoteException;
+
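+    /**
+     * Returns the most recent dataset id generated on the metadata node.
+     */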
+    public int getMostRecentDatasetId() throws MetadataException, RemoteException;
 
     /**
      * Removes a library , acquiring local locks on behalf of the given
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/AsterixProperties.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/AsterixProperties.java
deleted file mode 100644
index 71a7de3..0000000
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/AsterixProperties.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.metadata.bootstrap;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.Serializable;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Properties;
-
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-
-/**
- * Holder for Asterix properties values typically set as Java Properties.
- * Intended to live in the AsterixStateProxy so it can be accessed remotely.
- */
-public class AsterixProperties implements Serializable {
-
-    private static final long serialVersionUID = 1L;
-    private static String metadataNodeName;
-    private static Boolean isNewUniverse;
-    private static HashSet<String> nodeNames;
-    private static Map<String, String[]> stores;
-    private static String outputDir;
-
-    public static AsterixProperties INSTANCE = new AsterixProperties();
-
-    private AsterixProperties() {
-        try {
-            initializeProperties();
-        } catch (Exception e) {
-            throw new IllegalStateException(e);
-        }
-    }
-
-    @SuppressWarnings("unchecked")
-    private void initializeProperties() throws AlgebricksException {
-        Properties p = new Properties();
-        String fileName = System.getProperty(GlobalConfig.CONFIG_FILE_PROPERTY);
-        if (fileName == null) {
-            fileName = GlobalConfig.DEFAULT_CONFIG_FILE_NAME;
-        }
-
-        InputStream is = this.getClass().getClassLoader().getResourceAsStream(fileName);
-        if (is == null) {
-            try {
-                fileName = GlobalConfig.DEFAULT_CONFIG_FILE_NAME;
-                is = new FileInputStream(fileName);
-            } catch (FileNotFoundException fnf) {
-                throw new AlgebricksException("Could not find the configuration file " + fileName);
-            }
-        }
-        try {
-            p.load(is);
-            is.close();
-        } catch (IOException e) {
-            throw new AlgebricksException(e);
-        }
-        Enumeration<String> pNames = (Enumeration<String>) p.propertyNames();
-        stores = new HashMap<String, String[]>();
-        boolean newUniverseChosen = false;
-        String pn;
-        String val;
-        while (pNames.hasMoreElements()) {
-            pn = pNames.nextElement();
-            if (pn.equals("MetadataNode")) {
-                val = p.getProperty(pn);
-                metadataNodeName = val;
-            } else if (pn.equals("NewUniverse")) {
-                val = p.getProperty(pn);
-                newUniverseChosen = true;
-                isNewUniverse = Boolean.parseBoolean(val);
-            } else if (pn.equals("OutputDir")) {
-                val = p.getProperty(pn);
-                outputDir = val;
-            } else {
-                String ncName = pn.substring(0, pn.indexOf('.'));
-                val = p.getProperty(pn);
-                String[] folderNames = val.split("\\s*,\\s*");
-                int i = 0;
-                for (String store : folderNames) {
-                    boolean needsStartSep = !store.startsWith(File.separator);
-                    boolean needsEndSep = !store.endsWith(File.separator);
-                    if (needsStartSep && needsEndSep) {
-                        folderNames[i] = File.separator + store + File.separator;
-                    } else if (needsStartSep) {
-                        folderNames[i] = File.separator + store;
-                    } else if (needsEndSep) {
-                        folderNames[i] = store + File.separator;
-                    }
-                    i++;
-                }
-                stores.put(ncName, folderNames);
-                nodeNames = new HashSet<String>();
-                nodeNames.addAll(stores.keySet());
-            }
-        }
-        if (metadataNodeName == null)
-            throw new AlgebricksException("You need to specify the metadata node!");
-        if (!newUniverseChosen)
-            throw new AlgebricksException("You need to specify whether or not you want to start a new universe!");
-    }
-
-    public Boolean isNewUniverse() {
-        return isNewUniverse;
-    }
-
-    public String getMetadataNodeName() {
-        return metadataNodeName;
-    }
-
-    public String getMetadataStore() {
-        return stores.get(metadataNodeName)[0];
-    }
-
-    public Map<String, String[]> getStores() {
-        return stores;
-    }
-
-    public HashSet<String> getNodeNames() {
-        return nodeNames;
-    }
-
-    public String getOutputDir() {
-        return outputDir;
-    }
-}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/AsterixStateProxy.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/AsterixStateProxy.java
index 3946fa6..33f6994 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/AsterixStateProxy.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/AsterixStateProxy.java
@@ -30,7 +30,6 @@
     private static final Logger LOGGER = Logger.getLogger(AsterixStateProxy.class.getName());
 
     private IMetadataNode metadataNode;
-    private AsterixProperties asterixProperties;
     private static final IAsterixStateProxy cc = new AsterixStateProxy();
 
     public static IAsterixStateProxy registerRemoteObject() throws RemoteException {
@@ -53,14 +52,4 @@
     public IMetadataNode getMetadataNode() throws RemoteException {
         return this.metadataNode;
     }
-
-    @Override
-    public void setAsterixProperties(AsterixProperties asterixProperity) throws RemoteException {
-        this.asterixProperties = asterixProperity;
-    }
-
-    @Override
-    public AsterixProperties getAsterixProperties() throws RemoteException {
-        return this.asterixProperties;
-    }
 }
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
index 0bd2a46..3504360 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -20,14 +20,17 @@
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Set;
+import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
 import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
 import edu.uci.ics.asterix.common.context.AsterixAppRuntimeContext;
 import edu.uci.ics.asterix.common.context.AsterixRuntimeComponentsProvider;
 import edu.uci.ics.asterix.external.adapter.factory.IAdapterFactory;
@@ -108,12 +111,14 @@
 
     private static String metadataNodeName;
     private static String metadataStore;
-    private static HashSet<String> nodeNames;
+    private static Set<String> nodeNames;
     private static String outputDir;
 
     private static IMetadataIndex[] primaryIndexes;
     private static IMetadataIndex[] secondaryIndexes;
 
+    private static IAsterixPropertiesProvider propertiesProvider;
+
     private static void initLocalIndexArrays() {
         primaryIndexes = new IMetadataIndex[] { MetadataPrimaryIndexes.DATAVERSE_DATASET,
                 MetadataPrimaryIndexes.DATASET_DATASET, MetadataPrimaryIndexes.DATATYPE_DATASET,
@@ -125,9 +130,10 @@
                 MetadataSecondaryIndexes.DATATYPENAME_ON_DATATYPE_INDEX };
     }
 
-    public static void startUniverse(AsterixProperties asterixProperties, INCApplicationContext ncApplicationContext,
-            boolean isNewUniverse) throws Exception {
+    public static void startUniverse(IAsterixPropertiesProvider asterixPropertiesProvider,
+            INCApplicationContext ncApplicationContext, boolean isNewUniverse) throws Exception {
         runtimeContext = (AsterixAppRuntimeContext) ncApplicationContext.getApplicationObject();
+        propertiesProvider = asterixPropertiesProvider;
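+        // Keep the provider so enlistMetadataDataset() can read the storage properties later.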
 
         // Initialize static metadata objects, such as record types and metadata
         // index descriptors.
@@ -147,29 +153,25 @@
         resourceRepository.registerTransactionalResourceManager(ResourceType.LSM_INVERTED_INDEX,
                 new IndexResourceManager(ResourceType.LSM_INVERTED_INDEX, runtimeContext.getTransactionSubsystem()));
 
-        metadataNodeName = asterixProperties.getMetadataNodeName();
-        metadataStore = asterixProperties.getMetadataStore();
-        nodeNames = asterixProperties.getNodeNames();
+        AsterixMetadataProperties metadataProperties = propertiesProvider.getMetadataProperties();
+        metadataNodeName = metadataProperties.getMetadataNodeName();
+        metadataStore = metadataProperties.getMetadataStore();
+        nodeNames = metadataProperties.getNodeNames();
         // nodeStores = asterixProperity.getStores();
 
-        outputDir = asterixProperties.getOutputDir();
-        if (outputDir != null) {
-            (new File(outputDir)).mkdirs();
-        }
-
         indexLifecycleManager = runtimeContext.getIndexLifecycleManager();
         localResourceRepository = runtimeContext.getLocalResourceRepository();
         bufferCache = runtimeContext.getBufferCache();
         fileMapProvider = runtimeContext.getFileMapManager();
         ioManager = ncApplicationContext.getRootContext().getIOManager();
 
-        if (isNewUniverse) {
-            MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            try {
-                // Begin a transaction against the metadata.
-                // Lock the metadata in X mode.
-                MetadataManager.INSTANCE.lock(mdTxnCtx, LockMode.X);
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        try {
+            // Begin a transaction against the metadata.
+            // Lock the metadata in X mode.
+            MetadataManager.INSTANCE.lock(mdTxnCtx, LockMode.X);
 
+            if (isNewUniverse) {
                 for (int i = 0; i < primaryIndexes.length; i++) {
                     enlistMetadataDataset(primaryIndexes[i], true);
                 }
@@ -184,23 +186,35 @@
                 insertInitialGroups(mdTxnCtx);
                 insertInitialAdapters(mdTxnCtx);
 
-                MetadataManager.INSTANCE.initializeDatasetIdFactory(mdTxnCtx);
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-            } catch (Exception e) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
-                throw e;
-            }
-            LOGGER.info("FINISHED CREATING METADATA B-TREES.");
-        } else {
-            for (int i = 0; i < primaryIndexes.length; i++) {
-                enlistMetadataDataset(primaryIndexes[i], false);
-            }
-            for (int i = 0; i < secondaryIndexes.length; i++) {
-                enlistMetadataDataset(secondaryIndexes[i], false);
-            }
-            LOGGER.info("FINISHED ENLISTMENT OF METADATA B-TREES.");
-        }
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Finished creating metadata B-trees.");
+                }
+            } else {
+                for (int i = 0; i < primaryIndexes.length; i++) {
+                    enlistMetadataDataset(primaryIndexes[i], false);
+                }
+                for (int i = 0; i < secondaryIndexes.length; i++) {
+                    enlistMetadataDataset(secondaryIndexes[i], false);
+                }
 
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Finished enlistment of metadata B-trees.");
+                }
+            }
+
+            //#. initialize datasetIdFactory
+            MetadataManager.INSTANCE.initializeDatasetIdFactory(mdTxnCtx);
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        } catch (Exception e) {
+            try {
+                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+            } catch (Exception e2) {
+                e.addSuppressed(e2);
+            }
+            //TODO
+            //change the exception type to AbortFailureException
+            throw new MetadataException(e);
+        }
     }
 
     public static void stopUniverse() throws HyracksDataException {
@@ -318,8 +332,8 @@
                 adapterFactoryClassName, DatasourceAdapter.AdapterType.INTERNAL);
     }
 
-    public static void enlistMetadataDataset(IMetadataIndex index, boolean create) throws Exception {
-        String filePath = metadataStore + index.getFileNameRelativePath();
+    private static void enlistMetadataDataset(IMetadataIndex index, boolean create) throws Exception {
+        String filePath = metadataStore + File.separator + index.getFileNameRelativePath();
         FileReference file = new FileReference(new File(filePath));
         IInMemoryBufferCache memBufferCache = new InMemoryBufferCache(new HeapBufferAllocator(), DEFAULT_MEM_PAGE_SIZE,
                 DEFAULT_MEM_NUM_PAGES, new TransientFileMapManager());
@@ -340,9 +354,10 @@
             resourceID = runtimeContext.getResourceIdFactory().createId();
             indexLifecycleManager.register(resourceID, lsmBtree);
 
+            AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
             ILocalResourceMetadata localResourceMetadata = new LSMBTreeLocalResourceMetadata(typeTraits,
                     comparatorFactories, bloomFilterKeyFields, index.isPrimaryIndex(),
-                    GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES);
+                    storageProperties.getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages());
             ILocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
                     localResourceMetadata, LocalResource.LSMBTreeResource);
             ILocalResourceFactory localResourceFactory = localResourceFactoryProvider.getLocalResourceFactory();
@@ -383,6 +398,10 @@
 
         MetadataManager.INSTANCE.acquireWriteLatch();
 
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Starting DDL recovery ...");
+        }
+
         try {
             mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
 
@@ -392,6 +411,9 @@
                 if (dataverse.getPendingOp() != IMetadataEntity.PENDING_NO_OP) {
                     //drop pending dataverse
                     MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Dropped a pending dataverse: " + dataverseName);
+                    }
                 } else {
                     List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
                     for (Dataset dataset : datasets) {
@@ -399,6 +421,9 @@
                         if (dataset.getPendingOp() != IMetadataEntity.PENDING_NO_OP) {
                             //drop pending dataset
                             MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Dropped a pending dataset: " + dataverseName + "." + datasetName);
+                            }
                         } else {
                             List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
                                     datasetName);
@@ -407,14 +432,25 @@
                                 if (index.getPendingOp() != IMetadataEntity.PENDING_NO_OP) {
                                     //drop pending index
                                     MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
+                                    if (LOGGER.isLoggable(Level.INFO)) {
+                                        LOGGER.info("Dropped a pending index: " + dataverseName + "." + datasetName
+                                                + "." + indexName);
+                                    }
                                 }
                             }
                         }
                     }
                 }
             }
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Completed DDL recovery.");
+            }
         } catch (Exception e) {
-            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+            try {
+                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+            } catch (Exception e2) {
+                e.addSuppressed(e2);
+            }
             throw new MetadataException(e);
         } finally {
             MetadataManager.INSTANCE.releaseWriteLatch();
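A minimal stand-alone sketch of the abort-with-suppression pattern used in the recovery code above, assuming hypothetical Txn and TxnException names that are not part of this patch: the original failure stays primary and any failure during abort is attached via addSuppressed rather than masking it.

    public class AbortPatternSketch {

        static class TxnException extends Exception {
            TxnException(Throwable cause) {
                super(cause);
            }
        }

        interface Txn {
            void commit() throws Exception;

            void abort() throws Exception;
        }

        static void runInTxn(Txn txn, Runnable work) throws TxnException {
            try {
                work.run();
                txn.commit();
            } catch (Exception e) {
                try {
                    txn.abort();
                } catch (Exception e2) {
                    // Keep the original failure primary; record the abort failure as suppressed.
                    e.addSuppressed(e2);
                }
                throw new TxnException(e);
            }
        }
    }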
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java
index 35a9b83..f79b9b7 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java
@@ -22,6 +22,8 @@
 import java.util.logging.Logger;
 
 import edu.uci.ics.asterix.common.annotations.TypeDataGen;
+import edu.uci.ics.asterix.common.api.AsterixAppContextInfo;
+import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.formats.base.IDataFormat;
@@ -29,7 +31,6 @@
 import edu.uci.ics.asterix.metadata.MetadataManager;
 import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
 import edu.uci.ics.asterix.metadata.api.IMetadataManager;
-import edu.uci.ics.asterix.metadata.bootstrap.AsterixProperties;
 import edu.uci.ics.asterix.metadata.entities.Dataset;
 import edu.uci.ics.asterix.metadata.entities.Datatype;
 import edu.uci.ics.asterix.metadata.entities.Dataverse;
@@ -73,8 +74,9 @@
         this.dataverseName = dataverseName;
         this.outputFile = outputFile;
         this.config = config;
+        AsterixMetadataProperties metadataProperties = AsterixAppContextInfo.getInstance().getMetadataProperties();
         if (stores == null && online) {
-            this.stores = AsterixProperties.INSTANCE.getStores();
+            this.stores = metadataProperties.getStores();
         } else {
             this.stores = stores;
         }
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
index e1f707c..4773ed0 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
@@ -23,6 +23,8 @@
 import java.util.Map;
 import java.util.logging.Logger;
 
+import edu.uci.ics.asterix.common.api.AsterixAppContextInfo;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
 import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
 import edu.uci.ics.asterix.common.config.GlobalConfig;
@@ -47,7 +49,6 @@
 import edu.uci.ics.asterix.metadata.MetadataException;
 import edu.uci.ics.asterix.metadata.MetadataManager;
 import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
-import edu.uci.ics.asterix.metadata.bootstrap.AsterixProperties;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
 import edu.uci.ics.asterix.metadata.dataset.hints.DatasetHints.DatasetCardinalityHint;
 import edu.uci.ics.asterix.metadata.entities.Dataset;
@@ -151,6 +152,8 @@
     private final Dataverse defaultDataverse;
     private JobId jobId;
 
+    private final AsterixStorageProperties storageProperties;
+
     private static final Map<String, String> adapterFactoryMapping = initializeAdapterFactoryMapping();
 
     public String getPropertyValue(String propertyName) {
@@ -171,7 +174,8 @@
 
     public AqlMetadataProvider(Dataverse defaultDataverse) {
         this.defaultDataverse = defaultDataverse;
-        this.stores = AsterixProperties.INSTANCE.getStores();
+        this.stores = AsterixAppContextInfo.getInstance().getMetadataProperties().getStores();
+        this.storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
     }
 
     public void setJobId(JobId jobId) {
@@ -543,8 +547,8 @@
                             AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                             AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                             AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
-                            GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES),
-                    retainInput, searchCallbackFactory);
+                            storageProperties.getMemoryComponentPageSize(),
+                            storageProperties.getMemoryComponentNumPages()), retainInput, searchCallbackFactory);
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(btreeSearchOp, spPc.second);
 
         } catch (MetadataException me) {
@@ -612,8 +616,8 @@
                             AsterixRuntimeComponentsProvider.LSMRTREE_PROVIDER,
                             AsterixRuntimeComponentsProvider.LSMRTREE_PROVIDER, proposeLinearizer(
                                     nestedKeyType.getTypeTag(), comparatorFactories.length),
-                            GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES),
-                    retainInput, searchCallbackFactory);
+                            storageProperties.getMemoryComponentPageSize(),
+                            storageProperties.getMemoryComponentNumPages()), retainInput, searchCallbackFactory);
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(rtreeSearchOp, spPc.second);
 
         } catch (MetadataException me) {
@@ -643,7 +647,6 @@
         ResultSetDataSink rsds = (ResultSetDataSink) sink;
         ResultSetSinkId rssId = (ResultSetSinkId) rsds.getId();
         ResultSetId rsId = rssId.getResultSetId();
-        String nodeName = rssId.getResultNodeName();
 
         ResultWriterOperatorDescriptor resultWriter = null;
         try {
@@ -654,8 +657,7 @@
             throw new AlgebricksException(e);
         }
 
-        AlgebricksPartitionConstraint apc = new AlgebricksAbsolutePartitionConstraint(new String[] { nodeName });
-        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(resultWriter, apc);
+        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(resultWriter, null);
     }
 
     @Override
@@ -774,8 +776,8 @@
                             AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                             AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                             AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
-                            GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES),
-                    NoOpOperationCallbackFactory.INSTANCE);
+                            storageProperties.getMemoryComponentPageSize(),
+                            storageProperties.getMemoryComponentNumPages()), NoOpOperationCallbackFactory.INSTANCE);
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(btreeBulkLoad,
                     splitsAndConstraint.second);
         } catch (MetadataException me) {
@@ -841,9 +843,9 @@
                     new LSMBTreeDataflowHelperFactory(AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                             AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                             AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
-                            AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
-                            GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES), null,
-                    modificationCallbackFactory);
+                            AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER, storageProperties
+                                    .getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages()),
+                    null, modificationCallbackFactory);
 
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(btreeBulkLoad,
                     splitsAndConstraint.second);
@@ -1037,8 +1039,8 @@
                     new LSMBTreeDataflowHelperFactory(AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                             AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
                             AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
-                            AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER,
-                            GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES),
+                            AsterixRuntimeComponentsProvider.LSMBTREE_PROVIDER, storageProperties
+                                    .getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages()),
                     filterFactory, modificationCallbackFactory);
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(btreeBulkLoad,
                     splitsAndConstraint.second);
@@ -1163,8 +1165,8 @@
                             AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
                             AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
                             AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
-                            AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER,
-                            GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES),
+                            AsterixRuntimeComponentsProvider.LSMINVERTEDINDEX_PROVIDER, storageProperties
+                                    .getMemoryComponentPageSize(), storageProperties.getMemoryComponentNumPages()),
                     filterFactory, modificationCallbackFactory);
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(insertDeleteOp,
                     splitsAndConstraint.second);
@@ -1259,8 +1261,8 @@
                             AsterixRuntimeComponentsProvider.LSMRTREE_PROVIDER,
                             AsterixRuntimeComponentsProvider.LSMRTREE_PROVIDER, proposeLinearizer(
                                     nestedKeyType.getTypeTag(), comparatorFactories.length),
-                            GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES),
-                    filterFactory, modificationCallbackFactory);
+                            storageProperties.getMemoryComponentPageSize(),
+                            storageProperties.getMemoryComponentNumPages()), filterFactory, modificationCallbackFactory);
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(rtreeUpdate, splitsAndConstraint.second);
         } catch (MetadataException | IOException e) {
             throw new AlgebricksException(e);
@@ -1297,6 +1299,17 @@
     public Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitProviderAndPartitionConstraintsForInternalOrFeedDataset(
             String dataverseName, String datasetName, String targetIdxName) throws AlgebricksException {
         FileSplit[] splits = splitsForInternalOrFeedDataset(mdTxnCtx, dataverseName, datasetName, targetIdxName);
+        return splitProviderAndPartitionConstraints(splits);
+    }
+
+    public Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitProviderAndPartitionConstraintsForDataverse(
+            String dataverse) {
+        FileSplit[] splits = splitsForDataverse(mdTxnCtx, dataverse);
+        return splitProviderAndPartitionConstraints(splits);
+    }
+
+    private Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitProviderAndPartitionConstraints(
+            FileSplit[] splits) {
         IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
         String[] loc = new String[splits.length];
         for (int p = 0; p < splits.length; p++) {
@@ -1306,6 +1319,23 @@
         return new Pair<IFileSplitProvider, AlgebricksPartitionConstraint>(splitProvider, pc);
     }
 
+    private FileSplit[] splitsForDataverse(MetadataTransactionContext mdTxnCtx, String dataverseName) {
+        File relPathFile = new File(dataverseName);
+        List<FileSplit> splits = new ArrayList<FileSplit>();
+        for (Map.Entry<String, String[]> entry : stores.entrySet()) {
+            String node = entry.getKey();
+            String[] nodeStores = entry.getValue();
+            if (nodeStores == null) {
+                continue;
+            }
+            for (int i = 0; i < nodeStores.length; i++) {
+                File f = new File(nodeStores[i] + File.separator + relPathFile);
+                splits.add(new FileSplit(node, new FileReference(f)));
+            }
+        }
+        return splits.toArray(new FileSplit[] {});
+    }
+
     private FileSplit[] splitsForInternalOrFeedDataset(MetadataTransactionContext mdTxnCtx, String dataverseName,
             String datasetName, String targetIdxName) throws AlgebricksException {
 
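A small, self-contained sketch of what the new splitsForDataverse helper produces, using a made-up stores map (the node names and directories are illustrative only): one split per (node, store directory) pair, rooted at <store>/<dataverse>.

    import java.io.File;
    import java.util.HashMap;
    import java.util.Map;

    public class DataverseSplitSketch {
        public static void main(String[] args) {
            // Hypothetical node -> store-directory mapping, same shape as the
            // 'stores' field consulted by splitsForDataverse.
            Map<String, String[]> stores = new HashMap<String, String[]>();
            stores.put("nc1", new String[] { "/data/a", "/data/b" });
            stores.put("nc2", new String[] { "/data/c" });

            String dataverseName = "Metadata";
            // Mirrors the loop in the patch: one split per (node, store dir) pair.
            for (Map.Entry<String, String[]> entry : stores.entrySet()) {
                for (String store : entry.getValue()) {
                    System.out.println(entry.getKey() + " -> " + new File(store, dataverseName));
                }
            }
        }
    }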
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/ResultSetDataSink.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/ResultSetDataSink.java
index 53513d2..ad91111 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/ResultSetDataSink.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/ResultSetDataSink.java
@@ -17,6 +17,8 @@
 
 import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSink;
 import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.RandomPartitioningProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.ResultSetDomain;
 
 public class ResultSetDataSink implements IDataSink {
 
@@ -40,7 +42,6 @@
 
     @Override
     public IPartitioningProperty getPartitioningProperty() {
-        return IPartitioningProperty.UNPARTITIONED;
+        return new RandomPartitioningProperty(new ResultSetDomain());
     }
-
 }
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/ResultSetSinkId.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/ResultSetSinkId.java
index fc5152f..1eb4336 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/ResultSetSinkId.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/ResultSetSinkId.java
@@ -20,23 +20,16 @@
 
     private final ResultSetId resultSetId;
 
-    private final String resultNodeName;
-
-    public ResultSetSinkId(ResultSetId resultSetId, String resultNodeName) {
+    public ResultSetSinkId(ResultSetId resultSetId) {
         this.resultSetId = resultSetId;
-        this.resultNodeName = resultNodeName;
     }
 
     @Override
     public String toString() {
-        return "ResultSetId: " + resultSetId + "@" + resultNodeName;
+        return "ResultSetId: " + resultSetId;
     }
 
     public ResultSetId getResultSetId() {
         return resultSetId;
     }
-
-    public String getResultNodeName() {
-        return resultNodeName;
-    }
 }
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java
index 91a18f3..070e6ea 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java
@@ -40,7 +40,7 @@
     private final Map<String, String> hints;
     private final int datasetId;
     // Type of pending operations with respect to atomic DDL operation
-    private final int pendingOp;
+    private int pendingOp;
 
     public Dataset(String dataverseName, String datasetName, String itemTypeName, IDatasetDetails datasetDetails,
             Map<String, String> hints, DatasetType datasetType, int datasetId, int pendingOp) {
@@ -86,6 +86,10 @@
         return pendingOp;
     }
 
+    public void setPendingOp(int pendingOp) {
+        this.pendingOp = pendingOp;
+    }
+
     @Override
     public Object addToCache(MetadataCache cache) {
         return cache.addDatasetIfNotExists(this);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java
index 6d65730..aa29e5b 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java
@@ -15,7 +15,6 @@
 
 package edu.uci.ics.asterix.metadata.entities;
 
-import java.io.Serializable;
 import java.util.List;
 
 import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
@@ -46,7 +45,7 @@
     // Specific to NGRAM indexes.
     private final int gramLength;
     // Type of pending operations with respect to atomic DDL operation
-    private final int pendingOp;
+    private int pendingOp;
 
     public Index(String dataverseName, String datasetName, String indexName, IndexType indexType,
             List<String> keyFieldNames, int gramLength, boolean isPrimaryIndex, int pendingOp) {
@@ -103,6 +102,10 @@
     public int getPendingOp() {
         return pendingOp;
     }
+
+    public void setPendingOp(int pendingOp) {
+        this.pendingOp = pendingOp;
+    }
 
     public boolean isSecondaryIndex() {
         return !isPrimaryIndex();
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
index 0e486e7..ca926e4 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
@@ -2,7 +2,6 @@
 
 import edu.uci.ics.asterix.om.types.ATypeTag;
 import edu.uci.ics.asterix.om.types.EnumDeserializer;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -39,6 +38,7 @@
                     .createBinaryComparator();
             final IBinaryComparator ascDateOrTimeComp = ADateOrTimeAscBinaryComparatorFactory.INSTANCE
                     .createBinaryComparator();
+            final IBinaryComparator rawComp = RawBinaryComparatorFactory.INSTANCE.createBinaryComparator();
 
             @Override
             public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
@@ -84,7 +84,7 @@
                         return ascDateOrTimeComp.compare(b1, s1 + 1, l1 - 1, b2, s2 + 1, l2 - 1);
                     }
                     default: {
-                        throw new NotImplementedException("Comparison for type " + tag + " is not implemented");
+                        return rawComp.compare(b1, s1 + 1, l1 - 1, b2, s2 + 1, l2 - 1);
                     }
                 }
             }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/RawBinaryComparatorFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/RawBinaryComparatorFactory.java
new file mode 100644
index 0000000..2c4f951
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/RawBinaryComparatorFactory.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.dataflow.data.nontagged.comparators;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class RawBinaryComparatorFactory implements IBinaryComparatorFactory {
+
+    private static final long serialVersionUID = 1L;
+    public static final IBinaryComparatorFactory INSTANCE = new RawBinaryComparatorFactory();
+
+    private RawBinaryComparatorFactory() {
+    }
+
+    @Override
+    public IBinaryComparator createBinaryComparator() {
+        return new IBinaryComparator() {
+
+            @Override
+            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+                int commonLength = Math.min(l1, l2);
+                for (int i = 0; i < commonLength; i++) {
+                    if (b1[s1 + i] != b2[s2 + i]) {
+                        return b1[s1 + i] - b2[s2 + i];
+                    }
+                }
+                int difference = l1 - l2;
+                return difference == 0 ? 0 : (difference > 0 ? 1 : -1);
+            }
+
+        };
+    }
+}
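A brief usage sketch for the new RawBinaryComparatorFactory; the driver class below is hypothetical and not part of the patch. The comparator walks both byte ranges position by position using signed byte subtraction and, when one range is a prefix of the other, orders the shorter one first.

    import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.RawBinaryComparatorFactory;
    import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;

    public class RawComparatorExample {
        public static void main(String[] args) {
            IBinaryComparator cmp = RawBinaryComparatorFactory.INSTANCE.createBinaryComparator();
            byte[] a = { 1, 2, 3 };
            byte[] b = { 1, 2, 3, 4 };
            // 'a' is a strict prefix of 'b', so it compares as smaller (negative result).
            System.out.println(cmp.compare(a, 0, a.length, b, 0, b.length));
            // Identical ranges compare as equal (0).
            System.out.println(cmp.compare(a, 0, a.length, a, 0, a.length));
        }
    }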
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/AObjectBinaryHashFunctionFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/AObjectBinaryHashFunctionFactory.java
index 15fd8c8..2d19320 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/AObjectBinaryHashFunctionFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/AObjectBinaryHashFunctionFactory.java
@@ -2,7 +2,6 @@
 
 import edu.uci.ics.asterix.om.types.ATypeTag;
 import edu.uci.ics.asterix.om.types.EnumDeserializer;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
@@ -36,6 +35,7 @@
                     .createBinaryHashFunction();
             private IBinaryHashFunction rectangleHash = RectangleBinaryHashFunctionFactory.INSTANCE
                     .createBinaryHashFunction();
+            private IBinaryHashFunction rawHash = RawBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction();
 
             @Override
             public int hash(byte[] bytes, int offset, int length) {
@@ -66,7 +66,7 @@
                         return 0;
                     }
                     default: {
-                        throw new NotImplementedException("Binary hashing for the " + tag + " type is not implemented.");
+                        return rawHash.hash(bytes, offset + 1, length - 1);
                     }
                 }
             }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/RawBinaryHashFunctionFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/RawBinaryHashFunctionFactory.java
new file mode 100644
index 0000000..4aeb00e
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/RawBinaryHashFunctionFactory.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.dataflow.data.nontagged.hash;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+
+public class RawBinaryHashFunctionFactory implements IBinaryHashFunctionFactory {
+    private static final long serialVersionUID = 1L;
+
+    public static final IBinaryHashFunctionFactory INSTANCE = new RawBinaryHashFunctionFactory();
+
+    private RawBinaryHashFunctionFactory() {
+    }
+
+    @Override
+    public IBinaryHashFunction createBinaryHashFunction() {
+
+        return new IBinaryHashFunction() {
+            @Override
+            public int hash(byte[] bytes, int offset, int length) {
+                int value = 1;
+                int end = offset + length;
+                for (int i = offset; i < end; i++)
+                    value = value * 31 + (int) bytes[i];
+                return value;
+            }
+        };
+    }
+
+}
\ No newline at end of file
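Likewise, a small sketch for the new RawBinaryHashFunctionFactory (the driver class is hypothetical). The hash is a base-31 polynomial over the raw bytes of the given range, so for {10, 20, 30} it evaluates ((1 * 31 + 10) * 31 + 20) * 31 + 30 = 40051.

    import edu.uci.ics.asterix.dataflow.data.nontagged.hash.RawBinaryHashFunctionFactory;
    import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;

    public class RawHashExample {
        public static void main(String[] args) {
            IBinaryHashFunction hash = RawBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction();
            byte[] data = { 10, 20, 30 };
            // Base-31 polynomial over the raw bytes: ((1*31 + 10)*31 + 20)*31 + 30 = 40051
            System.out.println(hash.hash(data, 0, data.length));
        }
    }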
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java
index 02f0e47..38fddb7 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java
@@ -7,10 +7,10 @@
 import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.AObjectAscBinaryComparatorFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.AObjectDescBinaryComparatorFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.BooleanBinaryComparatorFactory;
+import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.RawBinaryComparatorFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.RectangleBinaryComparatorFactory;
 import edu.uci.ics.asterix.om.types.ATypeTag;
 import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
 import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
@@ -128,7 +128,7 @@
                 return addOffset(ADateTimeAscBinaryComparatorFactory.INSTANCE, ascending);
             }
             default: {
-                throw new NotImplementedException("No binary comparator factory implemented for type " + type + " .");
+                return addOffset(RawBinaryComparatorFactory.INSTANCE, ascending);
             }
         }
     }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java
index 1afdbf8..5498374 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java
@@ -6,9 +6,9 @@
 import edu.uci.ics.asterix.dataflow.data.nontagged.hash.BooleanBinaryHashFunctionFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.hash.DoubleBinaryHashFunctionFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.hash.LongBinaryHashFunctionFactory;
+import edu.uci.ics.asterix.dataflow.data.nontagged.hash.RawBinaryHashFunctionFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.hash.RectangleBinaryHashFunctionFactory;
 import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
 import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
@@ -88,8 +88,7 @@
                 return addOffset(RectangleBinaryHashFunctionFactory.INSTANCE);
             }
             default: {
-                throw new NotImplementedException("No binary hash function factory implemented for type "
-                        + aqlType.getTypeTag() + " .");
+                return addOffset(RawBinaryHashFunctionFactory.INSTANCE);
             }
         }
     }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
index cf00102..2305a9d 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
@@ -27,6 +27,7 @@
 import edu.uci.ics.asterix.om.typecomputer.impl.BinaryBooleanOrNullFunctionTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.BinaryStringBoolOrNullTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.BinaryStringStringOrNullTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.CastListResultTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.CastRecordResultTypeComputer;
 import edu.uci.ics.asterix.om.typecomputer.impl.ClosedRecordConstructorResultType;
 import edu.uci.ics.asterix.om.typecomputer.impl.FieldAccessByIndexResultType;
@@ -465,6 +466,8 @@
             "cast-record", 1);
     public final static FunctionIdentifier FLOW_RECORD = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
             "flow-record", 1);
+    public final static FunctionIdentifier CAST_LIST = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "cast-list", 1);
 
     // Spatial and temporal type accessors
     public static final FunctionIdentifier ACCESSOR_TEMPORAL_YEAR = new FunctionIdentifier(
@@ -704,7 +707,7 @@
         add(RECTANGLE_CONSTRUCTOR, OptionalARectangleTypeComputer.INSTANCE);
         // add(RECORD_TYPE_CONSTRUCTOR, null);
         add(SCALAR_AVG, ScalarVersionOfAggregateResultType.INSTANCE);
-        add(SCALAR_COUNT, ScalarVersionOfAggregateResultType.INSTANCE);
+        add(SCALAR_COUNT, AInt32TypeComputer.INSTANCE);
         add(SCALAR_GLOBAL_AVG, ScalarVersionOfAggregateResultType.INSTANCE);
         add(SCALAR_LOCAL_AVG, ScalarVersionOfAggregateResultType.INSTANCE);
         add(SCALAR_MAX, ScalarVersionOfAggregateResultType.INSTANCE);
@@ -779,6 +782,7 @@
         add(INJECT_FAILURE, InjectFailureTypeComputer.INSTANCE);
         add(CAST_RECORD, CastRecordResultTypeComputer.INSTANCE);
         add(FLOW_RECORD, FlowRecordResultTypeComputer.INSTANCE);
+        add(CAST_LIST, CastListResultTypeComputer.INSTANCE);
 
         add(TID, AInt32TypeComputer.INSTANCE);
         add(TIME_CONSTRUCTOR, OptionalATimeTypeComputer.INSTANCE);
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/AListPointable.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/AListPointable.java
index ab32b6b..84c9e6c 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/AListPointable.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/AListPointable.java
@@ -93,6 +93,7 @@
     @Override
     public void set(byte[] b, int s, int len) {
         reset();
+        super.set(b, s, len);
 
         int numberOfitems = AInt32SerializerDeserializer.getInt(b, s + 6);
         int itemOffset;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/CastListResultTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/CastListResultTypeComputer.java
new file mode 100644
index 0000000..4c3c56b
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/CastListResultTypeComputer.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+/**
+ * The type computer for the cast-list function
+ * 
+ * @author yingyib
+ */
+public class CastListResultTypeComputer implements IResultTypeComputer {
+
+    public static final CastListResultTypeComputer INSTANCE = new CastListResultTypeComputer();
+
+    @Override
+    public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+            IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+        ScalarFunctionCallExpression funcExpr = (ScalarFunctionCallExpression) expression;
+        return TypeComputerUtilities.getRequiredType(funcExpr);
+    }
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedFieldAccessByNameResultType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedFieldAccessByNameResultType.java
index db9d932..6d5880e 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedFieldAccessByNameResultType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedFieldAccessByNameResultType.java
@@ -70,6 +70,9 @@
                     if (t1.getTypeTag() == ATypeTag.RECORD) {
                         return (ARecordType) t1;
                     }
+                    if (t1.getTypeTag() == ATypeTag.ANY) {
+                        return DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
+                    }
                 }
             }
             default: {
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ScalarVersionOfAggregateResultType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ScalarVersionOfAggregateResultType.java
index c8ba938..9126065 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ScalarVersionOfAggregateResultType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ScalarVersionOfAggregateResultType.java
@@ -1,6 +1,5 @@
 package edu.uci.ics.asterix.om.typecomputer.impl;
 
-
 import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
 import edu.uci.ics.asterix.om.types.ATypeTag;
 import edu.uci.ics.asterix.om.types.AUnionType;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixRuntimeUtil.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixRuntimeUtil.java
index 76f3301..e280b2e 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixRuntimeUtil.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixRuntimeUtil.java
@@ -23,7 +23,7 @@
 import java.util.Map;
 import java.util.Set;
 
-import edu.uci.ics.asterix.common.api.AsterixAppContextInfoImpl;
+import edu.uci.ics.asterix.common.api.AsterixAppContextInfo;
 
 /**
  * Utility class for obtaining information on the set of Hyracks NodeController
@@ -52,7 +52,7 @@
 	public static Map<String, Set<String>> getNodeControllerMap()
 			throws Exception {
 		Map<String, Set<String>> map = new HashMap<String, Set<String>>();
-		AsterixAppContextInfoImpl.getInstance().getCCApplicationContext()
+		AsterixAppContextInfo.getInstance().getCCApplicationContext()
 				.getCCContext().getIPAddressNodeMap(map);
 		return map;
 	}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/CountAggregateFunction.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/CountAggregateFunction.java
index e4d015b..82c1e8a 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/CountAggregateFunction.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/CountAggregateFunction.java
@@ -6,7 +6,6 @@
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import edu.uci.ics.asterix.om.base.AInt32;
 import edu.uci.ics.asterix.om.base.AMutableInt32;
-import edu.uci.ics.asterix.om.base.ANull;
 import edu.uci.ics.asterix.om.types.ATypeTag;
 import edu.uci.ics.asterix.om.types.BuiltinType;
 import edu.uci.ics.asterix.om.types.EnumDeserializer;
@@ -20,19 +19,15 @@
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
 
 /**
- * count(NULL) returns NULL.
+ * COUNT returns the number of items in the given list. Note that COUNT(NULL) is not allowed.
  */
 public class CountAggregateFunction implements ICopyAggregateFunction {
     private AMutableInt32 result = new AMutableInt32(-1);
     @SuppressWarnings("unchecked")
     private ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
             .getSerializerDeserializer(BuiltinType.AINT32);
-    @SuppressWarnings("unchecked")
-    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(BuiltinType.ANULL);
     private ArrayBackedValueStorage inputVal = new ArrayBackedValueStorage();
     private ICopyEvaluator eval;
-    private boolean metNull;
     private int cnt;
     private DataOutput out;
 
@@ -44,7 +39,6 @@
     @Override
     public void init() {
         cnt = 0;
-        metNull = false;
     }
 
     @Override
@@ -53,9 +47,7 @@
         eval.evaluate(tuple);
         ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(inputVal.getByteArray()[0]);
         // Ignore SYSTEM_NULL.
-        if (typeTag == ATypeTag.NULL) {
-            metNull = true;
-        } else {
+        if (typeTag != ATypeTag.SYSTEM_NULL) {
             cnt++;
         }
     }
@@ -63,12 +55,8 @@
     @Override
     public void finish() throws AlgebricksException {
         try {
-            if (metNull) {
-                nullSerde.serialize(ANull.NULL, out);
-            } else {
-                result.setValue(cnt);
-                int32Serde.serialize(result, out);
-            }
+            result.setValue(cnt);
+            int32Serde.serialize(result, out);
         } catch (IOException e) {
             throw new AlgebricksException(e);
         }
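The change above makes COUNT skip only SYSTEM_NULL, so NULL items are now counted rather than collapsing the whole result to NULL. A stand-alone sketch of that semantics; the Tag enum is a simplified stand-in for ATypeTag, not the real serde path.

    public class CountSemanticsSketch {
        // Simplified stand-in for ATypeTag; only the tags relevant here.
        enum Tag { INT32, NULL, SYSTEM_NULL }

        public static void main(String[] args) {
            Tag[] column = { Tag.INT32, Tag.NULL, Tag.SYSTEM_NULL, Tag.INT32 };
            int cnt = 0;
            for (Tag t : column) {
                // Mirrors the new step(): only SYSTEM_NULL is skipped.
                if (t != Tag.SYSTEM_NULL) {
                    cnt++;
                }
            }
            System.out.println(cnt); // prints 3 (the NULL item is counted)
        }
    }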
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CastListDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CastListDescriptor.java
new file mode 100644
index 0000000..c1817e5
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CastListDescriptor.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.runtime.evaluators.functions;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.pointables.PointableAllocator;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.pointables.cast.ACastVisitor;
+import edu.uci.ics.asterix.om.types.AbstractCollectionType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * The runtime function for casting a list (unordered list or ordered list)
+ * 
+ * @author yingyib
+ *
+ */
+public class CastListDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new CastListDescriptor();
+        }
+    };
+
+    private static final long serialVersionUID = 1L;
+    private AbstractCollectionType reqType;
+    private AbstractCollectionType inputType;
+
+    public void reset(AbstractCollectionType reqType, AbstractCollectionType inputType) {
+        this.reqType = reqType;
+        this.inputType = inputType;
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return AsterixBuiltinFunctions.CAST_LIST;
+    }
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+        final ICopyEvaluatorFactory recordEvalFactory = args[0];
+
+        return new ICopyEvaluatorFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                final DataOutput out = output.getDataOutput();
+                final ArrayBackedValueStorage recordBuffer = new ArrayBackedValueStorage();
+                final ICopyEvaluator recEvaluator = recordEvalFactory.createEvaluator(recordBuffer);
+
+                return new ICopyEvaluator() {
+                    // pointable allocator
+                    private PointableAllocator allocator = new PointableAllocator();
+                    final IVisitablePointable recAccessor = allocator.allocateListValue(inputType);
+                    final IVisitablePointable resultAccessor = allocator.allocateListValue(reqType);
+                    final ACastVisitor castVisitor = new ACastVisitor();
+                    final Triple<IVisitablePointable, IAType, Boolean> arg = new Triple<IVisitablePointable, IAType, Boolean>(
+                            resultAccessor, reqType, Boolean.FALSE);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        try {
+                            recordBuffer.reset();
+                            recEvaluator.evaluate(tuple);
+                            recAccessor.set(recordBuffer);
+                            recAccessor.accept(castVisitor, arg);
+                            out.write(resultAccessor.getByteArray(), resultAccessor.getStartOffset(),
+                                    resultAccessor.getLength());
+                        } catch (Exception ioe) {
+                            throw new AlgebricksException(ioe);
+                        }
+                    }
+                };
+            }
+        };
+    }
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
index 6ec507b..a9d70ce 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
@@ -35,12 +35,14 @@
 import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
 import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
 import edu.uci.ics.asterix.om.functions.IFunctionManager;
+import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
 import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
 import edu.uci.ics.asterix.om.types.AOrderedListType;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.ATypeTag;
 import edu.uci.ics.asterix.om.types.AUnionType;
 import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.AbstractCollectionType;
 import edu.uci.ics.asterix.om.types.BuiltinType;
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.asterix.runtime.aggregates.collections.ListifyAggregateDescriptor;
@@ -111,6 +113,7 @@
 import edu.uci.ics.asterix.runtime.evaluators.constructors.ATimeConstructorDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.AndDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.AnyCollectionMemberDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.CastListDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.CastRecordDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.ClosedRecordConstructorDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.CodePointToStringDescriptor;
@@ -216,13 +219,12 @@
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalEndsDecriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalMeetsDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalMetByDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.MillisecondsOfDayTimeDurationDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.MonthsOfYearMonthDurationDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.OverlapDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalOverlappedByDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalOverlapsDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalStartedByDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalStartsDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.MillisecondsOfDayTimeDurationDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.MonthsOfYearMonthDurationDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.OverlapDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.SubtractDateDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.SubtractDatetimeDescriptor;
@@ -464,6 +466,7 @@
         temp.add(SwitchCaseDescriptor.FACTORY);
         temp.add(RegExpDescriptor.FACTORY);
         temp.add(InjectFailureDescriptor.FACTORY);
+        temp.add(CastListDescriptor.FACTORY);
         temp.add(CastRecordDescriptor.FACTORY);
         temp.add(FlowRecordDescriptor.FACTORY);
         temp.add(NotNullDescriptor.FACTORY);
@@ -692,9 +695,22 @@
             }
         }
         if (fd.getIdentifier().equals(AsterixBuiltinFunctions.CAST_RECORD)) {
-            ARecordType rt = (ARecordType) TypeComputerUtilities.getRequiredType((AbstractFunctionCallExpression) expr);
-            ARecordType it = (ARecordType) TypeComputerUtilities.getInputType((AbstractFunctionCallExpression) expr);
-            ((CastRecordDescriptor) fd).reset(rt, it);
+            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+            ARecordType rt = (ARecordType) TypeComputerUtilities.getRequiredType(funcExpr);
+            IAType it = (IAType) context.getType(funcExpr.getArguments().get(0).getValue());
+            if (it.getTypeTag().equals(ATypeTag.ANY)) {
+                it = DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
+            }
+            ((CastRecordDescriptor) fd).reset(rt, (ARecordType) it);
+        }
+        if (fd.getIdentifier().equals(AsterixBuiltinFunctions.CAST_LIST)) {
+            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+            AbstractCollectionType rt = (AbstractCollectionType) TypeComputerUtilities.getRequiredType(funcExpr);
+            IAType it = (IAType) context.getType(funcExpr.getArguments().get(0).getValue());
+            if (it.getTypeTag().equals(ATypeTag.ANY)) {
+                it = DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
+            }
+            ((CastListDescriptor) fd).reset(rt, (AbstractCollectionType) it);
         }
         if (fd.getIdentifier().equals(AsterixBuiltinFunctions.FLOW_RECORD)) {
             ARecordType it = (ARecordType) TypeComputerUtilities.getInputType((AbstractFunctionCallExpression) expr);
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/unnestingfunctions/std/ScanCollectionDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/unnestingfunctions/std/ScanCollectionDescriptor.java
index c70cdf8..f29d6be 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/unnestingfunctions/std/ScanCollectionDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/unnestingfunctions/std/ScanCollectionDescriptor.java
@@ -22,6 +22,8 @@
 import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
 import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
 import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
 import edu.uci.ics.asterix.runtime.evaluators.common.AsterixListAccessor;
 import edu.uci.ics.asterix.runtime.unnestingfunctions.base.AbstractUnnestingFunctionDynamicDescriptor;
 import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -73,12 +75,19 @@
                 private ArrayBackedValueStorage inputVal = new ArrayBackedValueStorage();
                 private ICopyEvaluator argEval = listEvalFactory.createEvaluator(inputVal);
                 private int itemIndex;
+                private boolean metNull = false;
 
                 @Override
                 public void init(IFrameTupleReference tuple) throws AlgebricksException {
                     try {
                         inputVal.reset();
                         argEval.evaluate(tuple);
+                        ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER
+                                .deserialize(inputVal.getByteArray()[0]);
+                        if (typeTag == ATypeTag.NULL) {
+                            metNull = true;
+                            return;
+                        }
                         listAccessor.reset(inputVal.getByteArray(), 0);
                         itemIndex = 0;
                     } catch (AsterixException e) {
@@ -89,10 +98,12 @@
                 @Override
                 public boolean step() throws AlgebricksException {
                     try {
-                        if (itemIndex < listAccessor.size()) {
-                            listAccessor.writeItem(itemIndex, out);
-                            ++itemIndex;
-                            return true;
+                        if (!metNull) {
+                            if (itemIndex < listAccessor.size()) {
+                                listAccessor.writeItem(itemIndex, out);
+                                ++itemIndex;
+                                return true;
+                            }
                         }
                     } catch (IOException e) {
                         throw new AlgebricksException(e);
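
The ScanCollectionDescriptor change above peeks at the first byte of the evaluated argument to detect a NULL value before treating it as a list, so step() simply yields no items in that case. A minimal sketch of that init()/step() shape, with hypothetical tag constants and plain Java collections standing in for the serialized list format:

import java.util.List;

public class NullAwareListScanSketch {
    // Hypothetical tag values; in the serialized format the first byte encodes the type tag.
    static final byte TAG_NULL = 0;
    static final byte TAG_ORDERED_LIST = 1;

    private List<String> items;
    private int itemIndex;
    private boolean metNull;

    // Mirrors init(): inspect the leading type tag and short-circuit on NULL.
    void init(byte[] serializedValue, List<String> decodedItems) {
        metNull = serializedValue.length == 0 || serializedValue[0] == TAG_NULL;
        if (metNull) {
            return;
        }
        items = decodedItems;
        itemIndex = 0;
    }

    // Mirrors step(): emit one item per call; a NULL input produces no items at all.
    boolean step(StringBuilder out) {
        if (!metNull && itemIndex < items.size()) {
            out.append(items.get(itemIndex++)).append('\n');
            return true;
        }
        return false;
    }

    public static void main(String[] args) {
        NullAwareListScanSketch scan = new NullAwareListScanSketch();
        StringBuilder out = new StringBuilder();
        scan.init(new byte[] { TAG_ORDERED_LIST }, List.of("a", "b"));
        while (scan.step(out)) { }
        scan.init(new byte[] { TAG_NULL }, List.of("ignored"));
        while (scan.step(out)) { }
        System.out.print(out); // prints a and b only
    }
}
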
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/IndexOperationTracker.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/IndexOperationTracker.java
index b3a6533..2016d08 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/IndexOperationTracker.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/IndexOperationTracker.java
@@ -15,6 +15,8 @@
 
 package edu.uci.ics.asterix.transaction.management.opcallbacks;
 
+import java.util.concurrent.atomic.AtomicInteger;
+
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
 import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
@@ -30,7 +32,7 @@
 public class IndexOperationTracker implements ILSMOperationTracker {
 
     // Number of active operations on a ILSMIndex instance.
-    private int numActiveOperations = 0;
+    private AtomicInteger numActiveOperations;
     private long lastLSN;
     private long firstLSN;
     private final ILSMIndex index;
@@ -38,6 +40,7 @@
     private ILSMIndexAccessor accessor;
 
     public IndexOperationTracker(ILSMIndex index, ILSMIOOperationCallbackFactory ioOpCallbackFactory) {
+        this.numActiveOperations = new AtomicInteger(0);
         this.index = index;
         //TODO 
         //This code is added to avoid a NullPointerException when the index's comparatorFactory is null.
@@ -54,7 +57,7 @@
     public void beforeOperation(LSMOperationType opType, ISearchOperationCallback searchCallback,
             IModificationOperationCallback modificationCallback) throws HyracksDataException {
         if (opType != LSMOperationType.FORCE_MODIFICATION) {
-            numActiveOperations++;
+            numActiveOperations.incrementAndGet();
 
             // Increment transactor-local active operations count.
             AbstractOperationCallback opCallback = getOperationCallback(searchCallback, modificationCallback);
@@ -76,7 +79,6 @@
     @Override
     public void completeOperation(LSMOperationType opType, ISearchOperationCallback searchCallback,
             IModificationOperationCallback modificationCallback) throws HyracksDataException {
-        numActiveOperations--;
 
         // Decrement transactor-local active operations count.
         AbstractOperationCallback opCallback = getOperationCallback(searchCallback, modificationCallback);
@@ -85,7 +87,7 @@
         }
         // If we need a flush, and this is the last completing operation, then schedule the flush.
         // Once the flush has completed notify all waiting operations.
-        if (index.getFlushStatus() && numActiveOperations == 0 && opType != LSMOperationType.FLUSH) {
+        if (index.getFlushStatus() && numActiveOperations.decrementAndGet() == 0 && opType != LSMOperationType.FLUSH) {
             if (accessor == null) {
                 accessor = (ILSMIndexAccessor) index.createAccessor(NoOpOperationCallback.INSTANCE,
                         NoOpOperationCallback.INSTANCE);
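
The IndexOperationTracker change above replaces the plain int counter with an AtomicInteger and folds the decrement into the flush check, so the last completing operation is the one that schedules the flush. A minimal sketch of that pattern, with the decrement kept unconditional so the count stays balanced even when no flush is pending (flushNeeded is a stand-in for index.getFlushStatus()):

import java.util.concurrent.atomic.AtomicInteger;

public class LastCompleterFlushSketch {
    private final AtomicInteger numActiveOperations = new AtomicInteger(0);
    private volatile boolean flushNeeded = true; // stand-in for index.getFlushStatus()

    void beforeOperation() {
        numActiveOperations.incrementAndGet();
    }

    void completeOperation() {
        // decrementAndGet() is atomic, so among concurrently active operations only the
        // last one to complete observes 0 and schedules the flush; no lock is required.
        int remaining = numActiveOperations.decrementAndGet();
        if (flushNeeded && remaining == 0) {
            scheduleFlush();
        }
    }

    private void scheduleFlush() {
        System.out.println(Thread.currentThread().getName() + " schedules the flush");
    }

    public static void main(String[] args) throws InterruptedException {
        LastCompleterFlushSketch tracker = new LastCompleterFlushSketch();
        Runnable op = () -> { tracker.beforeOperation(); tracker.completeOperation(); };
        Thread a = new Thread(op, "op-A");
        Thread b = new Thread(op, "op-B");
        a.start(); b.start();
        a.join(); b.join();
    }
}
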
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
index 0a12ba1..01dce6c 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
@@ -25,7 +25,10 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 
+import edu.uci.ics.asterix.transaction.management.service.logging.LogManager;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
 import edu.uci.ics.hyracks.storage.common.file.LocalResource;
@@ -33,6 +36,7 @@
 
 public class PersistentLocalResourceRepository implements ILocalResourceRepository {
 
+    private static final Logger LOGGER = Logger.getLogger(PersistentLocalResourceRepository.class.getName());
     private final String mountPoint;
     private static final String ROOT_METADATA_DIRECTORY = "asterix_root_metadata/";
     private static final String ROOT_METADATA_FILE_NAME_PREFIX = ".asterix_root_metadata_";
@@ -57,6 +61,9 @@
 
     public void initialize(String nodeId, String rootDir, boolean isNewUniverse, ResourceIdFactory resourceIdFactory)
             throws HyracksDataException {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Initializing local resource repository ... ");
+        }
         LocalResource rootLocalResource = null;
 
         //#. if the rootMetadataFile doesn't exist, create it and return.
@@ -67,6 +74,9 @@
             File rootMetadataDir = new File(mountPoint + ROOT_METADATA_DIRECTORY);
             if (!rootMetadataDir.exists()) {
                 rootMetadataDir.mkdir();
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("created the root-metadata-file's directory: " + rootMetadataDir.getAbsolutePath());
+                }
             }
 
             rootMetadataFile.delete();
@@ -77,17 +87,31 @@
             }
             rootLocalResource = new LocalResource(ROOT_LOCAL_RESOURCE_ID, rootMetadataFileName, 0, 0, this.rootDir);
             insert(rootLocalResource);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("created the root-metadata-file: " + rootMetadataFileName);
+            }
+            
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Completed the initialization of the local resource repository");
+            }
             return;
         }
 
         //#. if the rootMetadataFile exists, read it and set this.rootDir.
         rootLocalResource = readLocalResource(rootMetadataFile);
         this.rootDir = (String) rootLocalResource.getResourceObject();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("The root directory of the local resource repository is " + this.rootDir);
+        }
 
         //#. load all local resources. 
         File rootDirFile = new File(this.rootDir);
         if (!rootDirFile.exists()) {
             //rootDir may not exist if this node is not the metadata node and doesn't have any user data.
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("The root directory of the local resource repository doesn't exist: there is no local resource.");
+                LOGGER.info("Completed the initialization of the local resource repository");
+            }
             return;
         }
 
@@ -119,6 +143,10 @@
                                     id2ResourceMap.put(localResource.getResourceId(), localResource);
                                     name2ResourceMap.put(localResource.getResourceName(), localResource);
                                     maxResourceId = Math.max(localResource.getResourceId(), maxResourceId);
+                                    if (LOGGER.isLoggable(Level.INFO)) {
+                                        LOGGER.info("loaded local resource - [id: " + localResource.getResourceId()
+                                                + ", name: " + localResource.getResourceName() + "]");
+                                    }
                                 }
                             }
                         }
@@ -127,6 +155,10 @@
             }
         }
         resourceIdFactory.initId(maxResourceId + 1);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("The resource id factory is initialized with the value: " + (maxResourceId + 1));
+            LOGGER.info("Completed the initialization of the local resource repository");
+        }
     }
 
     @Override
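
The logging added to PersistentLocalResourceRepository consistently wraps each call in an isLoggable() guard. A tiny sketch of why: the guard skips building the message string entirely when INFO is disabled, which matters inside loops such as the per-resource loading pass above.

import java.util.logging.Level;
import java.util.logging.Logger;

public class GuardedLoggingSketch {
    private static final Logger LOGGER = Logger.getLogger(GuardedLoggingSketch.class.getName());

    public static void main(String[] args) {
        long maxResourceId = 41;
        // Without the guard, the concatenation below would run even when INFO is off.
        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("The resource id factory is initialized with the value: " + (maxResourceId + 1));
        }
    }
}
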
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/FileBasedBuffer.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/FileBasedBuffer.java
index 4319b8b..e88d74e 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/FileBasedBuffer.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/FileBasedBuffer.java
@@ -19,6 +19,9 @@
 import java.io.RandomAccessFile;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 /**
  * Represents a buffer that is backed by a physical file. Provides custom APIs
@@ -27,22 +30,32 @@
 public class FileBasedBuffer extends Buffer implements IFileBasedBuffer {
 
     private String filePath;
-    private long nextWritePosition;
     private FileChannel fileChannel;
     private RandomAccessFile raf;
-    private int size;
+    private int bufferSize;
 
-    public FileBasedBuffer(String filePath, long offset, int size) throws IOException {
+    private int bufferLastFlushOffset;
+    private int bufferNextWriteOffset;
+    private final int diskSectorSize;
+
+    private final ReadWriteLock latch;
+    private final AtomicInteger referenceCount;
+
+    public FileBasedBuffer(String filePath, long offset, int bufferSize, int diskSectorSize) throws IOException {
         this.filePath = filePath;
-        this.nextWritePosition = offset;
-        buffer = ByteBuffer.allocate(size);
+        buffer = ByteBuffer.allocate(bufferSize);
         raf = new RandomAccessFile(new File(filePath), "rw");
-        raf.seek(offset);
         fileChannel = raf.getChannel();
+        fileChannel.position(offset);
         fileChannel.read(buffer);
         buffer.position(0);
-        this.size = size;
-        buffer.limit(size);
+        this.bufferSize = bufferSize;
+        buffer.limit(bufferSize);
+        bufferLastFlushOffset = 0;
+        bufferNextWriteOffset = 0;
+        this.diskSectorSize = diskSectorSize;
+        latch = new ReentrantReadWriteLock(true);
+        referenceCount = new AtomicInteger(0);
     }
 
     public String getFilePath() {
@@ -53,17 +66,9 @@
         this.filePath = filePath;
     }
 
-    public long getOffset() {
-        return nextWritePosition;
-    }
-
-    public void setOffset(long offset) {
-        this.nextWritePosition = offset;
-    }
-
     @Override
     public int getSize() {
-        return buffer.limit();
+        return bufferSize;
     }
 
     public void clear() {
@@ -72,11 +77,18 @@
 
     @Override
     public void flush() throws IOException {
-        buffer.position(0);
-        buffer.limit(size);
+        //flush
+        int pos = bufferLastFlushOffset;
+        int limit = (((bufferNextWriteOffset - 1) / diskSectorSize) + 1) * diskSectorSize;
+        buffer.position(pos);
+        buffer.limit(limit);
         fileChannel.write(buffer);
         fileChannel.force(true);
-        erase();
+
+        //update variables
+        bufferLastFlushOffset = limit;
+        bufferNextWriteOffset = limit;
+        buffer.limit(bufferSize);
     }
 
     @Override
@@ -124,45 +136,110 @@
      * starting at offset.
      */
     @Override
-    public void reset(String filePath, long nextWritePosition, int size) throws IOException {
+    public void reset(String filePath, long diskNextWriteOffset, int bufferSize) throws IOException {
         if (!filePath.equals(this.filePath)) {
             raf.close();//required?
             fileChannel.close();
             raf = new RandomAccessFile(filePath, "rw");
             this.filePath = filePath;
         }
-        this.nextWritePosition = nextWritePosition;
-        raf.seek(nextWritePosition);
         fileChannel = raf.getChannel();
+        fileChannel.position(diskNextWriteOffset);
         erase();
         buffer.position(0);
-        buffer.limit(size);
-        this.size = size;
+        buffer.limit(bufferSize);
+        this.bufferSize = bufferSize;
+
+        bufferLastFlushOffset = 0;
+        bufferNextWriteOffset = 0;
     }
-    
+
     @Override
     public void close() throws IOException {
         fileChannel.close();
     }
-    
+
     @Override
-    public void open(String filePath, long offset, int size) throws IOException {
+    public void open(String filePath, long offset, int bufferSize) throws IOException {
         raf = new RandomAccessFile(filePath, "rw");
-        this.nextWritePosition = offset;
         fileChannel = raf.getChannel();
         fileChannel.position(offset);
         erase();
         buffer.position(0);
-        buffer.limit(size);
-        this.size = size;
+        buffer.limit(bufferSize);
+        this.bufferSize = bufferSize;
+        bufferLastFlushOffset = 0;
+        bufferNextWriteOffset = 0;
     }
 
-    public long getNextWritePosition() {
-        return nextWritePosition;
+    @Override
+    public long getDiskNextWriteOffset() throws IOException {
+        return fileChannel.position();
     }
 
-    public void setNextWritePosition(long nextWritePosition) {
-        this.nextWritePosition = nextWritePosition;
+    @Override
+    public void setDiskNextWriteOffset(long offset) throws IOException {
+        fileChannel.position(offset);
     }
 
+    @Override
+    public int getBufferLastFlushOffset() {
+        return bufferLastFlushOffset;
+    }
+
+    @Override
+    public void setBufferLastFlushOffset(int offset) {
+        this.bufferLastFlushOffset = offset;
+    }
+
+    @Override
+    public int getBufferNextWriteOffset() {
+        synchronized (fileChannel) {
+            return bufferNextWriteOffset;
+        }
+    }
+
+    @Override
+    public void setBufferNextWriteOffset(int offset) {
+        synchronized (fileChannel) {
+            if (bufferNextWriteOffset < offset) {
+                bufferNextWriteOffset = offset;
+            }
+        }
+    }
+
+    @Override
+    public void acquireWriteLatch() {
+        latch.writeLock().lock();
+    }
+
+    @Override
+    public void releaseWriteLatch() {
+        latch.writeLock().unlock();
+    }
+
+    @Override
+    public void acquireReadLatch() {
+        latch.readLock().lock();
+    }
+
+    @Override
+    public void releaseReadLatch() {
+        latch.readLock().unlock();
+    }
+
+    @Override
+    public void incRefCnt() {
+        referenceCount.incrementAndGet();
+    }
+    
+    @Override
+    public void decRefCnt() {
+        referenceCount.decrementAndGet();
+    }
+    
+    @Override
+    public int getRefCnt() {
+        return referenceCount.get();
+    }
 }
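
The rewritten flush() above writes only the dirty region of the page, rounding the end of that region up to the next disk sector boundary before handing it to the FileChannel. A self-contained sketch of the arithmetic and the write-then-force sequence (the buffer size, offsets, and temp file below are illustrative only):

import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;

public class SectorAlignedFlushSketch {
    // Round the next-write offset up to the next sector boundary, as flush() above does.
    static int flushLimit(int bufferNextWriteOffset, int diskSectorSize) {
        return (((bufferNextWriteOffset - 1) / diskSectorSize) + 1) * diskSectorSize;
    }

    public static void main(String[] args) throws IOException {
        int sector = 512;
        System.out.println(flushLimit(1, sector));   // 512
        System.out.println(flushLimit(512, sector)); // 512
        System.out.println(flushLimit(513, sector)); // 1024

        // Write only the dirty, sector-aligned slice of the page and force it to disk.
        ByteBuffer buffer = ByteBuffer.allocate(4096);
        int bufferLastFlushOffset = 0;
        int bufferNextWriteOffset = 700; // 700 bytes appended since the last flush
        try (FileChannel channel = new RandomAccessFile(
                Files.createTempFile("log-page", ".tmp").toFile(), "rw").getChannel()) {
            buffer.position(bufferLastFlushOffset);
            buffer.limit(flushLimit(bufferNextWriteOffset, sector));
            channel.write(buffer);
            channel.force(true); // like fileChannel.force(true) above
        }
    }
}
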
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/FileUtil.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/FileUtil.java
index f2ffa0e..46e03f1 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/FileUtil.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/FileUtil.java
@@ -38,8 +38,8 @@
         return (new File(path)).mkdir();
     }
 
-    public static IFileBasedBuffer getFileBasedBuffer(String filePath, long offset, int size) throws IOException {
-        IFileBasedBuffer fileBasedBuffer = new FileBasedBuffer(filePath, offset, size);
+    public static IFileBasedBuffer getFileBasedBuffer(String filePath, long offset, int bufferSize, int diskSectorSize) throws IOException {
+        IFileBasedBuffer fileBasedBuffer = new FileBasedBuffer(filePath, offset, bufferSize, diskSectorSize);
         return fileBasedBuffer;
     }
 
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/IFileBasedBuffer.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/IFileBasedBuffer.java
index e1f9f95..a4ea3cb 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/IFileBasedBuffer.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/IFileBasedBuffer.java
@@ -31,12 +31,34 @@
      */
     public void reset(String filePath, long offset, int size) throws IOException;
 
-    public long getNextWritePosition();
+    public long getDiskNextWriteOffset() throws IOException;
 
-    public void setNextWritePosition(long writePosition);
+    public void setDiskNextWriteOffset(long writePosition) throws IOException;
 
     public void close() throws IOException;
     
     public void open(String filePath, long offset, int size) throws IOException;
 
+    public int getBufferLastFlushOffset();
+
+    public void setBufferLastFlushOffset(int offset);
+
+    public int getBufferNextWriteOffset();
+
+    public void setBufferNextWriteOffset(int offset);
+    
+    public void acquireWriteLatch();
+
+    public void releaseWriteLatch();
+
+    public void acquireReadLatch();
+
+    public void releaseReadLatch();
+    
+    public void incRefCnt();
+    
+    public void decRefCnt();
+    
+    public int getRefCnt();
+
 }
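
The new latch and reference-count methods on IFileBasedBuffer support the coordination used later in this patch: log writers register themselves under the read latch, and the flusher takes the write latch to block new registrations while it waits for in-flight writers to drain. A minimal sketch of that protocol; the class below is illustrative, not the actual implementation:

import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

public class PageLatchSketch {
    private final ReadWriteLock latch = new ReentrantReadWriteLock(true);
    private final AtomicInteger referenceCount = new AtomicInteger(0);

    // A log writer registers itself under the read latch; many writers may do so concurrently.
    void registerWriter() {
        latch.readLock().lock();
        try {
            referenceCount.incrementAndGet();
        } finally {
            latch.readLock().unlock();
        }
    }

    // Deregistration does not need the latch; the flusher only waits for the count to reach 0.
    void unregisterWriter() {
        referenceCount.decrementAndGet();
    }

    // The flusher excludes new registrations, drains in-flight writers, then flushes the page.
    void flush() throws InterruptedException {
        latch.writeLock().lock();
        try {
            while (referenceCount.get() != 0) {
                Thread.sleep(0);
            }
            System.out.println("page flushed");
        } finally {
            latch.writeLock().unlock();
        }
    }

    public static void main(String[] args) throws InterruptedException {
        PageLatchSketch page = new PageLatchSketch();
        page.registerWriter();
        page.unregisterWriter();
        page.flush();
    }
}
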
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/ILogManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/ILogManager.java
index c629d03..26229a7 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/ILogManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/ILogManager.java
@@ -53,17 +53,6 @@
             ACIDException;
 
     /**
-     * Provides a cursor for retrieving logs that satisfy a given ILogFilter
-     * instance. Log records are retrieved in increasing order of lsn
-     * 
-     * @param logFilter
-     *            specifies the filtering criteria for the retrieved logs
-     * @return LogCursor an iterator for the retrieved logs
-     * @throws ACIDException
-     */
-    public ILogCursor readLog(ILogFilter logFilter) throws ACIDException;
-
-    /**
      * @param logicalLogLocator TODO
      * @param PhysicalLogLocator
      *            specifies the location of the log record to be read
@@ -72,15 +61,6 @@
     public void readLog(long lsnValue, LogicalLogLocator logicalLogLocator) throws ACIDException;
 
     /**
-     * Flushes the log records up to the lsn represented by the
-     * logicalLogLocator
-     * 
-     * @param logicalLogLocator
-     * @throws ACIDException
-     */
-    public void flushLog(LogicalLogLocator logicalLogLocator) throws ACIDException;
-
-    /**
      * Retrieves the configuration parameters of the ILogManager
      * 
      * @return LogManagerProperties: the configuration parameters for the
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/ILogRecordHelper.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/ILogRecordHelper.java
index 80f74cb..0e24f9d 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/ILogRecordHelper.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/ILogRecordHelper.java
@@ -25,41 +25,43 @@
 
 public interface ILogRecordHelper {
 
-    byte getLogType(LogicalLogLocator logicalLogLocator);
+    public byte getLogType(LogicalLogLocator logicalLogLocator);
 
-    int getJobId(LogicalLogLocator logicalLogLocator);
+    public int getJobId(LogicalLogLocator logicalLogLocator);
 
-    int getDatasetId(LogicalLogLocator logicalLogLocator);
+    public int getDatasetId(LogicalLogLocator logicalLogLocator);
 
-    int getPKHashValue(LogicalLogLocator logicalLogLocator);
+    public int getPKHashValue(LogicalLogLocator logicalLogLocator);
 
-    PhysicalLogLocator getPrevLSN(LogicalLogLocator logicalLogLocator);
+    public PhysicalLogLocator getPrevLSN(LogicalLogLocator logicalLogLocator);
 
-    boolean getPrevLSN(PhysicalLogLocator physicalLogLocator, LogicalLogLocator logicalLogLocator);
+    public boolean getPrevLSN(PhysicalLogLocator physicalLogLocator, LogicalLogLocator logicalLogLocator);
     
-    long getResourceId(LogicalLogLocator logicalLogLocator);
+    public long getResourceId(LogicalLogLocator logicalLogLocator);
     
-    byte getResourceMgrId(LogicalLogLocator logicalLogLocater);
+    public byte getResourceMgrId(LogicalLogLocator logicalLogLocater);
 
-    int getLogContentSize(LogicalLogLocator logicalLogLocater);
+    public int getLogContentSize(LogicalLogLocator logicalLogLocater);
 
-    long getLogChecksum(LogicalLogLocator logicalLogLocator);
+    public long getLogChecksum(LogicalLogLocator logicalLogLocator);
 
-    int getLogContentBeginPos(LogicalLogLocator logicalLogLocator);
+    public int getLogContentBeginPos(LogicalLogLocator logicalLogLocator);
 
-    int getLogContentEndPos(LogicalLogLocator logicalLogLocator);
+    public int getLogContentEndPos(LogicalLogLocator logicalLogLocator);
 
-    String getLogRecordForDisplay(LogicalLogLocator logicalLogLocator);
+    public String getLogRecordForDisplay(LogicalLogLocator logicalLogLocator);
 
-    void writeLogHeader(LogicalLogLocator logicalLogLocator, byte logType, TransactionContext context, int datasetId,
+    public void writeLogHeader(LogicalLogLocator logicalLogLocator, byte logType, TransactionContext context, int datasetId,
             int PKHashValue, long prevLogicalLogLocator, long resourceId, byte resourceMgrId, int logRecordSize);
 
-    boolean validateLogRecord(LogicalLogLocator logicalLogLocator);
+    public boolean validateLogRecord(LogicalLogLocator logicalLogLocator);
 
-    int getLogRecordSize(byte logType, int logBodySize);
+    public int getLogRecordSize(byte logType, int logBodySize);
 
-    int getLogHeaderSize(byte logType);
+    public int getLogHeaderSize(byte logType);
 
-    int getLogChecksumSize();
+    public int getLogChecksumSize();
+    
+    public int getCommitLogSize();
 
 }
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogCursor.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogCursor.java
index 8a2b188..7e954d8 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogCursor.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogCursor.java
@@ -23,27 +23,16 @@
 
     private final LogManager logManager;
     private final ILogFilter logFilter;
+    private final int logPageSize;
     private IBuffer readOnlyBuffer;
     private LogicalLogLocator logicalLogLocator = null;
-    private long bufferIndex = 0;
-    private boolean firstNext = true;
-    private boolean readMemory = false;
-    private long readLSN = 0;
     private boolean needReloadBuffer = true;
 
-    /**
-     * @param logFilter
-     */
-    public LogCursor(final LogManager logManager, ILogFilter logFilter) throws ACIDException {
+    public LogCursor(final LogManager logManager, PhysicalLogLocator startingPhysicalLogLocator, ILogFilter logFilter,
+            int logPageSize) throws IOException, ACIDException {
         this.logFilter = logFilter;
         this.logManager = logManager;
-
-    }
-
-    public LogCursor(final LogManager logManager, PhysicalLogLocator startingPhysicalLogLocator, ILogFilter logFilter)
-            throws IOException, ACIDException {
-        this.logFilter = logFilter;
-        this.logManager = logManager;
+        this.logPageSize = logPageSize;
         initialize(startingPhysicalLogLocator);
     }
 
@@ -57,7 +46,8 @@
         File file = new File(filePath);
         if (file.exists()) {
             return FileUtil.getFileBasedBuffer(filePath, lsn
-                    % logManager.getLogManagerProperties().getLogPartitionSize(), size);
+                    % logManager.getLogManagerProperties().getLogPartitionSize(), size, logManager
+                    .getLogManagerProperties().getDiskSectorSize());
         } else {
             return null;
         }
@@ -87,8 +77,7 @@
             return false;
         }
 
-        //if the lsn to read is greater than the last flushed lsn, then read from memory
-        if (logicalLogLocator.getLsn() > logManager.getLastFlushedLsn().get()) {
+        if (logManager.isMemoryRead(logicalLogLocator.getLsn())) {
             return readFromMemory(currentLogLocator);
         }
 
@@ -96,10 +85,9 @@
         //needReloadBuffer is set to true if the log record is read from the memory log page.
         if (needReloadBuffer) {
             //log page size doesn't exceed integer boundary
-            int offset = (int)(logicalLogLocator.getLsn() % logManager.getLogManagerProperties().getLogPageSize());
+            int offset = (int) (logicalLogLocator.getLsn() % logPageSize);
             long adjustedLSN = logicalLogLocator.getLsn() - offset;
-            readOnlyBuffer = getReadOnlyBuffer(adjustedLSN, logManager.getLogManagerProperties()
-                    .getLogPageSize());
+            readOnlyBuffer = getReadOnlyBuffer(adjustedLSN, logPageSize);
             logicalLogLocator.setBuffer(readOnlyBuffer);
             logicalLogLocator.setMemoryOffset(offset);
             needReloadBuffer = false;
@@ -110,14 +98,14 @@
         while (logicalLogLocator.getMemoryOffset() <= readOnlyBuffer.getSize()
                 - logManager.getLogRecordHelper().getLogHeaderSize(LogType.COMMIT)) {
             integerRead = readOnlyBuffer.readInt(logicalLogLocator.getMemoryOffset());
-            if (integerRead == logManager.getLogManagerProperties().LOG_MAGIC_NUMBER) {
+            if (integerRead == LogManagerProperties.LOG_MAGIC_NUMBER) {
                 logRecordBeginPosFound = true;
                 break;
             }
             logicalLogLocator.increaseMemoryOffset(1);
             logicalLogLocator.incrementLsn();
             bytesSkipped++;
-            if (bytesSkipped > logManager.getLogManagerProperties().getLogPageSize()) {
+            if (bytesSkipped > logPageSize) {
                 return false; // the maximum size of a log record is limited to
                 // a log page size. If we have skipped as many
                 // bytes without finding a log record, it
@@ -133,10 +121,9 @@
             // need to reload the buffer
             // TODO
             // reduce IO by reading more pages(equal to logBufferSize) at a time.
-            long lsnpos = ((logicalLogLocator.getLsn() / logManager.getLogManagerProperties().getLogPageSize()) + 1)
-                    * logManager.getLogManagerProperties().getLogPageSize();
+            long lsnpos = ((logicalLogLocator.getLsn() / logPageSize) + 1) * logPageSize;
 
-            readOnlyBuffer = getReadOnlyBuffer(lsnpos, logManager.getLogManagerProperties().getLogPageSize());
+            readOnlyBuffer = getReadOnlyBuffer(lsnpos, logPageSize);
             if (readOnlyBuffer != null) {
                 logicalLogLocator.setBuffer(readOnlyBuffer);
                 logicalLogLocator.setLsn(lsnpos);
@@ -190,13 +177,12 @@
         IFileBasedBuffer logPage = logManager.getLogPage(pageIndex);
         synchronized (logPage) {
             // need to check again if the log record in the log buffer or has reached the disk
-            if (lsn > logManager.getLastFlushedLsn().get()) {
+            if (logManager.isMemoryRead(lsn)) {
 
                 //find the magic number to identify the start of the log record
                 //----------------------------------------------------------------
                 int readNumber = -1;
-                int logPageSize = logManager.getLogManagerProperties().getLogPageSize();
-                int logMagicNumber = logManager.getLogManagerProperties().LOG_MAGIC_NUMBER;
+                int logMagicNumber = LogManagerProperties.LOG_MAGIC_NUMBER;
                 int bytesSkipped = 0;
                 boolean logRecordBeginPosFound = false;
                 //check whether the currentOffset has enough space to have new log record by comparing
@@ -223,7 +209,8 @@
                     // need to read the next log page
                     readOnlyBuffer = null;
                     logicalLogLocator.setBuffer(null);
-                    logicalLogLocator.setLsn(lsn / logPageSize + 1);
+                    lsn = ((logicalLogLocator.getLsn() / logPageSize) + 1) * logPageSize;
+                    logicalLogLocator.setLsn(lsn);
                     logicalLogLocator.setMemoryOffset(0);
                     return next(currentLogLocator);
                 }
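
Both the disk and memory read paths in LogCursor locate the next record by scanning forward for the 4-byte log magic number, giving up once a full page worth of bytes has been skipped (a record never spans pages). A self-contained sketch of that scan; the magic value here is an arbitrary placeholder, not the real LOG_MAGIC_NUMBER:

import java.nio.ByteBuffer;

public class MagicNumberScanSketch {
    static final int LOG_MAGIC_NUMBER = 0x6C6F6721; // placeholder value

    // Scan one byte at a time until the magic number that starts a log record is found.
    static int findLogRecordStart(ByteBuffer page, int startOffset, int logPageSize) {
        int bytesSkipped = 0;
        for (int offset = startOffset; offset <= page.capacity() - Integer.BYTES; offset++) {
            if (page.getInt(offset) == LOG_MAGIC_NUMBER) {
                return offset;
            }
            if (++bytesSkipped > logPageSize) {
                break; // skipped a whole page without a match, so there is no further record
            }
        }
        return -1;
    }

    public static void main(String[] args) {
        ByteBuffer page = ByteBuffer.allocate(128);
        page.putInt(40, LOG_MAGIC_NUMBER); // pretend a record starts at offset 40
        System.out.println(findLogRecordStart(page, 0, 128));  // 40
        System.out.println(findLogRecordStart(page, 44, 128)); // -1
    }
}
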
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java
index 9b8f09c..199fd0f 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java
@@ -27,15 +27,12 @@
 import java.util.Properties;
 import java.util.Set;
 import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
 import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
 import edu.uci.ics.asterix.transaction.management.service.logging.IndexLogger.ReusableLogContentObject;
-import edu.uci.ics.asterix.transaction.management.service.logging.LogManager.PageOwnershipStatus;
-import edu.uci.ics.asterix.transaction.management.service.logging.LogManager.PageState;
 import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionContext;
 import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionManagementConstants;
 import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
@@ -48,6 +45,9 @@
     private final TransactionSubsystem provider;
     private LogManagerProperties logManagerProperties;
     private LogPageFlushThread logPageFlusher;
+    private final int logPageSize;
+    private long statLogSize;
+    private long statLogCount;
 
     /*
      * the array of log pages. The number of log pages is configurable. Pages
@@ -62,47 +62,6 @@
      */
     private int numLogPages;
 
-    /*
-     * Initially all pages have an owner count of 1 that is the LogManager. When
-     * a transaction requests to write in a log page, the owner count is
-     * incremented. The log manager reserves space in the log page and puts in
-     * the log header but leaves the space for the content and the checksum
-     * (covering the whole log record). When the content has been put, the log
-     * manager computes the checksum and puts it after the content. At this
-     * point, the ownership count is decremented as the transaction is done with
-     * using the page. When a page is requested to be flushed, logPageFlusher
-     * set the count to 0(LOG_FLUSHER: meaning that the page is being flushed)
-     * only if the count is 1(LOG_WRITER: meaning that there is no other
-     * transactions who own the page to write logs.) After flushing the page,
-     * logPageFlusher set this count to 1.
-     */
-    private AtomicInteger[] logPageOwnerCount;
-
-    static class PageOwnershipStatus {
-        public static final int LOG_WRITER = 1;
-        public static final int LOG_FLUSHER = 0;
-    }
-
-    /*
-     * LogPageStatus: A page is either ACTIVE or INACTIVE. The status for each
-     * page is maintained in logPageStatus. A page is ACTIVE when the LogManager
-     * can allocate space in the page for writing a log record. Initially all
-     * pages are ACTIVE. As transactions fill up space by writing log records, a
-     * page may not have sufficient space left for serving a request by a
-     * transaction. When this happens, the page is flushed to disk by calling
-     * logPageFlusher.requestFlush(). In the requestFlush(), after
-     * groupCommitWaitTime, the page status is set to INACTIVE. Then, there is
-     * no more writer on the page(meaning the corresponding logPageOwnerCount is
-     * 1), the page is flushed by the logPageFlusher and the status is reset to
-     * ACTIVE by the logPageFlusher.
-     */
-    private AtomicInteger[] logPageStatus;
-
-    static class PageState {
-        public static final int INACTIVE = 0;
-        public static final int ACTIVE = 1;
-    }
-
     private AtomicLong lastFlushedLSN = new AtomicLong(-1);
 
     /*
@@ -129,10 +88,6 @@
         return lastFlushedLSN;
     }
 
-    public AtomicInteger getLogPageStatus(int pageIndex) {
-        return logPageStatus[pageIndex];
-    }
-
     public AtomicLong getCurrentLsn() {
         return lsn;
     }
@@ -144,13 +99,19 @@
     public LogManager(TransactionSubsystem provider) throws ACIDException {
         this.provider = provider;
         initLogManagerProperties(this.provider.getId());
+        logPageSize = logManagerProperties.getLogPageSize();
         initLogManager();
+        statLogSize = 0;
+        statLogCount = 0;
     }
 
     public LogManager(TransactionSubsystem provider, String nodeId) throws ACIDException {
         this.provider = provider;
         initLogManagerProperties(nodeId);
+        logPageSize = logManagerProperties.getLogPageSize();
         initLogManager();
+        statLogSize = 0;
+        statLogCount = 0;
     }
 
     /*
@@ -186,9 +147,6 @@
     private void initLogManager() throws ACIDException {
         logRecordHelper = new LogRecordHelper(this);
         numLogPages = logManagerProperties.getNumLogPages();
-        logPageOwnerCount = new AtomicInteger[numLogPages];
-        logPageStatus = new AtomicInteger[numLogPages];
-
         activeTxnCountMaps = new ArrayList<HashMap<TransactionContext, Integer>>(numLogPages);
         for (int i = 0; i < numLogPages; i++) {
             activeTxnCountMaps.add(new HashMap<TransactionContext, Integer>());
@@ -199,20 +157,12 @@
         /*
          * place the log anchor at the end of the last log record written.
          */
-        PhysicalLogLocator nextPhysicalLsn = initLSN();
-
-        /*
-         * initialize meta data for each log page.
-         */
-        for (int i = 0; i < numLogPages; i++) {
-            logPageOwnerCount[i] = new AtomicInteger(PageOwnershipStatus.LOG_WRITER);
-            logPageStatus[i] = new AtomicInteger(PageState.ACTIVE);
-        }
+        initLSN();
 
         /*
          * initialize the log pages.
          */
-        initializeLogPages(nextPhysicalLsn);
+        initializeLogPages(startingLSN);
 
         /*
          * Instantiate and begin the LogFlusher thread. The Log Flusher thread
@@ -226,7 +176,7 @@
     }
 
     public int getLogPageIndex(long lsnValue) {
-        return (int) (((lsnValue - startingLSN) / logManagerProperties.getLogPageSize()) % numLogPages);
+        return (int) (((lsnValue - startingLSN) / logPageSize) % numLogPages);
     }
 
     /*
@@ -242,28 +192,7 @@
      * record is (to be) placed.
      */
     public int getLogPageOffset(long lsnValue) {
-        return (int) ((lsnValue - startingLSN) % logManagerProperties.getLogPageSize());
-    }
-
-    /*
-     * a transaction thread under certain scenarios is required to wait until
-     * the page where it has to write a log record becomes available for writing
-     * a log record.
-     */
-    private void waitUntillPageIsAvailableForWritingLog(int pageIndex) throws ACIDException {
-        if (logPageStatus[pageIndex].get() == PageState.ACTIVE
-                && logPageOwnerCount[pageIndex].get() >= PageOwnershipStatus.LOG_WRITER) {
-            return;
-        }
-        try {
-            synchronized (logPages[pageIndex]) {
-                while (!(logPageStatus[pageIndex].get() == PageState.ACTIVE && logPageOwnerCount[pageIndex].get() >= PageOwnershipStatus.LOG_WRITER)) {
-                    logPages[pageIndex].wait();
-                }
-            }
-        } catch (InterruptedException e) {
-            throw new ACIDException(" thread interrupted while waiting for page " + pageIndex + " to be available ", e);
-        }
+        return (int) (lsnValue % logPageSize);
     }
 
     /*
@@ -277,7 +206,6 @@
      * @param logType: the type of log record.
      */
     private long getLsn(int entrySize, byte logType) throws ACIDException {
-        long pageSize = logManagerProperties.getLogPageSize();
 
         while (true) {
             boolean forwardPage = false;
@@ -294,9 +222,9 @@
 
             // check if the log record will cross page boundaries, a case that
             // is not allowed.
-            if ((next - 1) / pageSize != old / pageSize || (next % pageSize == 0)) {
+            if ((next - 1) / logPageSize != old / logPageSize || (next % logPageSize == 0)) {
 
-                if ((old != 0 && old % pageSize == 0)) {
+                if ((old != 0 && old % logPageSize == 0)) {
                     // On second thought, this shall never be the case as it
                     // means that the lsn is
                     // currently at the beginning of a page and we still need to
@@ -309,7 +237,7 @@
 
                 } else {
                     // set the lsn to point to the beginning of the next page.
-                    retVal = ((old / pageSize) + 1) * pageSize;
+                    retVal = ((old / logPageSize) + 1) * logPageSize;
                 }
 
                 next = retVal;
@@ -323,20 +251,6 @@
                 pageIndex = getNextPageInSequence(pageIndex);
             }
 
-            /*
-             * we do not want to keep allocating LSNs if the corresponding page
-             * is unavailable. Consider a scenario when the log flusher thread
-             * is incredibly slow in flushing pages. Transaction threads will
-             * acquire an lsn each for writing their next log record. When a
-             * page has been made available, mulltiple transaction threads that
-             * were waiting can continue to write their log record at the
-             * assigned LSNs. Two transaction threads may get LSNs that are on
-             * the same log page but actually differ by the size of the log
-             * buffer. This would be erroneous. Transaction threads are made to
-             * wait upfront for avoiding this situation.
-             */
-            waitUntillPageIsAvailableForWritingLog(pageIndex);
-
             if (!lsn.compareAndSet(old, next)) {
                 // Atomic call -> returns true only when the value represented
                 // by lsn is same as
@@ -345,6 +259,10 @@
             }
 
             if (forwardPage) {
+
+                // forward the nextWriteOffset in the log page
+                logPages[pageIndex].setBufferNextWriteOffset(logPageSize);
+
                 addFlushRequest(prevPage, old, false);
 
                 // The transaction thread that discovers the need to forward a
@@ -352,21 +270,18 @@
                 continue;
 
             } else {
-                // the transaction thread has been given a space in a log page,
-                // but is made to wait until the page is available.
-                // (Is this needed? when does this wait happen?)
-                waitUntillPageIsAvailableForWritingLog(pageIndex);
-
+                logPages[pageIndex].acquireReadLatch();
                 // increment the counter as the transaction thread now holds a
                 // space in the log page and hence is an owner.
-                logPageOwnerCount[pageIndex].incrementAndGet();
+                logPages[pageIndex].incRefCnt();
+                logPages[pageIndex].releaseReadLatch();
 
                 // Before the count is incremented, if the flusher flushed the
                 // allocated page,
                 // then retry to get new LSN. Otherwise, the log with allocated
                 // lsn will be lost.
                 if (lastFlushedLSN.get() >= retVal) {
-                    logPageOwnerCount[pageIndex].decrementAndGet();
+                    logPages[pageIndex].decRefCnt();
                     continue;
                 }
             }
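
With the page ownership counters gone, getLsn() above relies on a compare-and-set loop over the global LSN: a record that would cross (or end exactly on) a page boundary is forwarded to the start of the next page, and the previous page is handed to the flusher. A minimal sketch of the idea, simplified to claim the space in a single CAS; the real loop first forwards the LSN to the page start, requests a flush of the previous page, and retries:

import java.util.concurrent.atomic.AtomicLong;

public class LsnAllocationSketch {
    private final AtomicLong lsn = new AtomicLong(0);
    private final int logPageSize;

    LsnAllocationSketch(int logPageSize) {
        this.logPageSize = logPageSize;
    }

    // Allocate entrySize bytes of log space; records may not cross (or end exactly on)
    // a page boundary, so such an allocation is moved to the start of the next page.
    long allocate(int entrySize) {
        while (true) {
            long old = lsn.get();
            long start = old;
            long next = old + entrySize;
            if ((next - 1) / logPageSize != old / logPageSize || next % logPageSize == 0) {
                start = ((old / logPageSize) + 1) * logPageSize;
                next = start + entrySize;
            }
            if (lsn.compareAndSet(old, next)) {
                return start; // this thread now owns [start, start + entrySize)
            }
            // another thread advanced the LSN first; recompute against the new value
        }
    }

    public static void main(String[] args) {
        LsnAllocationSketch log = new LsnAllocationSketch(128);
        System.out.println(log.allocate(100)); // 0
        System.out.println(log.allocate(100)); // 128: bytes 100..199 would cross the boundary
        System.out.println(log.allocate(20));  // 228: fits inside the second page
    }
}
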
@@ -396,10 +311,10 @@
         int totalLogSize = logRecordHelper.getLogRecordSize(logType, logContentSize);
 
         // check for the total space requirement to be less than a log page.
-        if (totalLogSize > logManagerProperties.getLogPageSize()) {
+        if (totalLogSize > logPageSize) {
             throw new ACIDException(
                     " Maximum Log Content Size is "
-                            + (logManagerProperties.getLogPageSize() - logRecordHelper.getLogHeaderSize(LogType.UPDATE) - logRecordHelper
+                            + (logPageSize - logRecordHelper.getLogHeaderSize(LogType.UPDATE) - logRecordHelper
                                     .getLogChecksumSize()));
         }
 
@@ -482,16 +397,21 @@
             logPages[pageIndex].writeLong(pageOffset + logRecordHelper.getLogHeaderSize(logType) + logContentSize,
                     checksum);
 
-            if (IS_DEBUG_MODE) {
-                System.out.println("--------------> LSN(" + currentLSN + ") is written");
+            // forward the nextWriteOffset in the log page
+            int bufferNextWriteOffset = (int) ((currentLSN + totalLogSize) % logPageSize);
+            if (bufferNextWriteOffset == 0) {
+                bufferNextWriteOffset = logPageSize;
             }
+            logPages[pageIndex].setBufferNextWriteOffset(bufferNextWriteOffset);
 
-            // release the ownership as the log record has been placed in
-            // created space.
-            logPageOwnerCount[pageIndex].decrementAndGet();
+            if (logType != LogType.ENTITY_COMMIT) {
+                // release the ownership as the log record has been placed in
+                // created space.
+                logPages[pageIndex].decRefCnt();
 
-            // indicating that the transaction thread has released ownership
-            decremented = true;
+                // indicating that the transaction thread has released ownership
+                decremented = true;
+            }
 
             if (logType == LogType.ENTITY_COMMIT) {
                 map = activeTxnCountMaps.get(pageIndex);
@@ -502,18 +422,42 @@
                 } else {
                     map.put(txnCtx, 1);
                 }
+                //------------------------------------------------------------------------------
+                // [Notice]
+                // reference count should be decremented 
+                // after activeTxnCount is incremented, but before addFlushRequest() is called. 
+                //------------------------------------------------------------------------------
+                // release the ownership as the log record has been placed in
+                // created space.
+                logPages[pageIndex].decRefCnt();
+
+                // indicating that the transaction thread has released ownership
+                decremented = true;
+                
                 addFlushRequest(pageIndex, currentLSN, false);
             } else if (logType == LogType.COMMIT) {
+
                 addFlushRequest(pageIndex, currentLSN, true);
+                if (IS_DEBUG_MODE) {
+                    System.out.println("Running sum of log size: " + statLogSize + ", log count: " + statLogCount);
+                }
             }
 
+            if (IS_DEBUG_MODE) {
+                System.out.println("--------------> LSN(" + currentLSN + ") is written");
+            }
+
+            //collect statistics
+            statLogSize += totalLogSize;
+            statLogCount++;
+
         } catch (Exception e) {
             e.printStackTrace();
             throw new ACIDException(txnCtx, "Thread: " + Thread.currentThread().getName()
                     + " logger encountered exception", e);
         } finally {
             if (!decremented) {
-                logPageOwnerCount[pageIndex].decrementAndGet();
+                logPages[pageIndex].decRefCnt();
             }
         }
     }
@@ -526,20 +470,13 @@
 
         String filePath = LogUtil.getLogFilePath(logManagerProperties, getLogFileId(lsn));
 
-        logPages[pageIndex].reset(filePath, LogUtil.getFileOffset(this, nextWritePosition),
-                logManagerProperties.getLogPageSize());
-    }
-
-    @Override
-    public ILogCursor readLog(ILogFilter logFilter) throws ACIDException {
-        LogCursor cursor = new LogCursor(this, logFilter);
-        return cursor;
+        logPages[pageIndex].reset(filePath, LogUtil.getFileOffset(this, nextWritePosition), logPageSize);
     }
 
     @Override
     public ILogCursor readLog(PhysicalLogLocator physicalLogLocator, ILogFilter logFilter) throws IOException,
             ACIDException {
-        LogCursor cursor = new LogCursor(this, physicalLogLocator, logFilter);
+        LogCursor cursor = new LogCursor(this, physicalLogLocator, logFilter, logPageSize);
         return cursor;
     }
 
@@ -550,7 +487,7 @@
         String filePath = LogUtil.getLogFilePath(logManagerProperties, LogUtil.getFileId(this, lsnValue));
         long fileOffset = LogUtil.getFileOffset(this, lsnValue);
 
-        ByteBuffer buffer = ByteBuffer.allocate(logManagerProperties.getLogPageSize());
+        ByteBuffer buffer = ByteBuffer.allocate(logPageSize);
         RandomAccessFile raf = null;
         FileChannel fileChannel = null;
         try {
@@ -603,7 +540,7 @@
         }
 
         /* check if the log record in the log buffer or has reached the disk. */
-        if (lsnValue > getLastFlushedLsn().get()) {
+        if (isMemoryRead(lsnValue)) {
             int pageIndex = getLogPageIndex(lsnValue);
             int pageOffset = getLogPageOffset(lsnValue);
 
@@ -611,7 +548,7 @@
             // minimize memory allocation overhead. current code allocates the
             // log page size per reading a log record.
 
-            byte[] pageContent = new byte[logManagerProperties.getLogPageSize()];
+            byte[] pageContent = new byte[logPageSize];
 
             // take a lock on the log page so that the page is not flushed to
             // disk interim
@@ -619,8 +556,8 @@
 
                 // need to check again (this thread may have got de-scheduled
                 // and must refresh!)
-                if (lsnValue > getLastFlushedLsn().get()) {
 
+                if (isMemoryRead(lsnValue)) {
                     // get the log record length
                     logPages[pageIndex].getBytes(pageContent, 0, pageContent.length);
                     byte logType = pageContent[pageOffset + 4];
@@ -656,6 +593,20 @@
         readDiskLog(lsnValue, logicalLogLocator);
     }
 
+    public boolean isMemoryRead(long currentLSN) {
+        long flushLSN = lastFlushedLSN.get();
+        if ((flushLSN + 1) % logPageSize == 0) {
+            return false;
+        }
+        long logPageBeginOffset = flushLSN - (flushLSN % logPageSize);
+        long logPageEndOffset = logPageBeginOffset + logPageSize;
+        if (currentLSN > flushLSN || (currentLSN >= logPageBeginOffset && currentLSN < logPageEndOffset)) {
+            return true;
+        } else {
+            return false;
+        }
+    }
+
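
The new isMemoryRead() centralizes the check that previously compared an LSN against lastFlushedLsn in several places: a record is read from the in-memory log pages if it lies past the last flushed LSN or inside the partially flushed page that contains it. A small worked sketch of the same arithmetic:

public class MemoryReadCheckSketch {
    static boolean isMemoryRead(long currentLSN, long lastFlushedLSN, int logPageSize) {
        if ((lastFlushedLSN + 1) % logPageSize == 0) {
            return false; // the page holding lastFlushedLSN was flushed in full and remapped
        }
        long pageBegin = lastFlushedLSN - (lastFlushedLSN % logPageSize);
        long pageEnd = pageBegin + logPageSize;
        return currentLSN > lastFlushedLSN || (currentLSN >= pageBegin && currentLSN < pageEnd);
    }

    public static void main(String[] args) {
        int pageSize = 128;
        System.out.println(isMemoryRead(300, 200, pageSize)); // true: not flushed yet
        System.out.println(isMemoryRead(130, 200, pageSize)); // true: same page as LSN 200
        System.out.println(isMemoryRead(10, 200, pageSize));  // false: page [0, 128) is on disk
        System.out.println(isMemoryRead(100, 127, pageSize)); // false: page [0, 128) fully flushed
    }
}
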
     public void renewLogFiles() throws ACIDException {
         List<String> logFileNames = LogUtil.getLogFiles(logManagerProperties);
         for (String name : logFileNames) {
@@ -670,7 +621,7 @@
         logPageFlusher.renew();
     }
 
-    private PhysicalLogLocator initLSN() throws ACIDException {
+    private void initLSN() throws ACIDException {
         PhysicalLogLocator nextPhysicalLsn = LogUtil.initializeLogAnchor(this);
         startingLSN = nextPhysicalLsn.getLsn();
         lastFlushedLSN.set(startingLSN - 1);
@@ -678,7 +629,6 @@
             LOGGER.info(" Starting lsn is : " + startingLSN);
         }
         lsn.set(startingLSN);
-        return nextPhysicalLsn;
     }
 
     private void closeLogPages() throws ACIDException {
@@ -695,9 +645,7 @@
         try {
             String filePath = LogUtil.getLogFilePath(logManagerProperties, LogUtil.getFileId(this, startingLSN));
             for (int i = 0; i < numLogPages; i++) {
-                logPages[i].open(filePath,
-                        LogUtil.getFileOffset(this, startingLSN) + i * logManagerProperties.getLogPageSize(),
-                        logManagerProperties.getLogPageSize());
+                logPages[i].open(filePath, LogUtil.getFileOffset(this, startingLSN) + i * logPageSize, logPageSize);
             }
         } catch (Exception e) {
             throw new ACIDException(Thread.currentThread().getName() + " unable to create log buffer", e);
@@ -710,33 +658,25 @@
     }
 
     /*
-     * This method shall be called by the Buffer manager when it needs to evict
-     * a page from the cache. TODO: Change the implementation from a looping
-     * logic to event based when log manager support is integrated with the
-     * Buffer Manager.
-     */
-    @Override
-    public synchronized void flushLog(LogicalLogLocator logicalLogLocator) throws ACIDException {
-        if (logicalLogLocator.getLsn() > lsn.get()) {
-            throw new ACIDException(" invalid lsn " + logicalLogLocator.getLsn());
-        }
-        while (lastFlushedLSN.get() < logicalLogLocator.getLsn());
-    }
-
-    /*
      * Map each log page to cover a physical byte range over a log file. When a
      * page is flushed, the page contents are put to disk in the corresponding
      * byte range.
      */
-    private void initializeLogPages(PhysicalLogLocator physicalLogLocator) throws ACIDException {
+    private void initializeLogPages(long beginLsn) throws ACIDException {
         try {
-            String filePath = LogUtil.getLogFilePath(logManagerProperties,
-                    LogUtil.getFileId(this, physicalLogLocator.getLsn()));
+            String filePath = LogUtil.getLogFilePath(logManagerProperties, LogUtil.getFileId(this, beginLsn));
+            long nextDiskWriteOffset = LogUtil.getFileOffset(this, beginLsn);
+            long nextBufferWriteOffset = nextDiskWriteOffset % logPageSize;
+            long bufferBeginOffset = nextDiskWriteOffset - nextBufferWriteOffset;
+
             for (int i = 0; i < numLogPages; i++) {
-                logPages[i] = FileUtil.getFileBasedBuffer(
-                        filePath,
-                        LogUtil.getFileOffset(this, physicalLogLocator.getLsn()) + i
-                                * logManagerProperties.getLogPageSize(), logManagerProperties.getLogPageSize());
+                logPages[i] = FileUtil.getFileBasedBuffer(filePath, bufferBeginOffset + i * logPageSize, logPageSize,
+                        logManagerProperties.getDiskSectorSize());
+                if (i == 0) {
+                    logPages[i].setBufferLastFlushOffset((int) nextBufferWriteOffset);
+                    logPages[i].setBufferNextWriteOffset((int) nextBufferWriteOffset);
+                    logPages[i].setDiskNextWriteOffset(nextDiskWriteOffset);
+                }
             }
         } catch (Exception e) {
             e.printStackTrace();
@@ -764,10 +704,6 @@
         return logPages[pageIndex];
     }
 
-    public AtomicInteger getLogPageOwnershipCount(int pageIndex) {
-        return logPageOwnerCount[pageIndex];
-    }
-
     public IFileBasedBuffer[] getLogPages() {
         return logPages;
     }
@@ -829,7 +765,7 @@
      */
     private final LinkedBlockingQueue<Object>[] flushRequestQueue;
     private final Object[] flushRequests;
-    private int pageToFlush;
+    private int flushPageIndex;
     private final long groupCommitWaitPeriod;
     private boolean isRenewRequest;
 
@@ -843,14 +779,14 @@
             flushRequestQueue[i] = new LinkedBlockingQueue<Object>(1);
             flushRequests[i] = new Object();
         }
-        this.pageToFlush = -1;
+        this.flushPageIndex = 0;
         groupCommitWaitPeriod = logManager.getLogManagerProperties().getGroupCommitWaitPeriod();
         isRenewRequest = false;
     }
 
     public void renew() {
         isRenewRequest = true;
-        pageToFlush = -1;
+        flushPageIndex = 0;
         this.interrupt();
         isRenewRequest = false;
     }
@@ -886,15 +822,19 @@
 
     @Override
     public void run() {
+        int logPageSize = logManager.getLogManagerProperties().getLogPageSize();
+        int logBufferSize = logManager.getLogManagerProperties().getLogBufferSize();
+        int beforeFlushOffset = 0;
+        int afterFlushOffset = 0;
+        boolean resetFlushPageIndex = false;
+
         while (true) {
             try {
-                pageToFlush = logManager.getNextPageInSequence(pageToFlush);
-
                 // A wait call on the linkedBLockingQueue. The flusher thread is
                 // notified when an object is added to the queue. Please note
                 // that each page has an associated blocking queue.
                 try {
-                    flushRequestQueue[pageToFlush].take();
+                    flushRequestQueue[flushPageIndex].take();
                 } catch (InterruptedException ie) {
                     while (isRenewRequest) {
                         sleep(1);
@@ -902,58 +842,67 @@
                     continue;
                 }
 
-                synchronized (logManager.getLogPage(pageToFlush)) {
-
-                    // #. sleep during the groupCommitWaitTime
+                //if the log page is already full, don't wait. 
+                if (logManager.getLogPage(flushPageIndex).getBufferNextWriteOffset() < logPageSize
+                        - logManager.getLogRecordHelper().getCommitLogSize()) {
+                    // #. sleep for the groupCommitWaitTime
                     sleep(groupCommitWaitPeriod);
+                }
 
-                    // #. set the logPageStatus to INACTIVE in order to prevent
-                    // other txns from writing on this page.
-                    logManager.getLogPageStatus(pageToFlush).set(PageState.INACTIVE);
+                synchronized (logManager.getLogPage(flushPageIndex)) {
+                    logManager.getLogPage(flushPageIndex).acquireWriteLatch();
+                    try {
 
-                    // #. need to wait until the logPageOwnerCount reaches 1
-                    // (LOG_WRITER)
-                    // meaning every one has finished writing logs on this page.
-                    while (logManager.getLogPageOwnershipCount(pageToFlush).get() != PageOwnershipStatus.LOG_WRITER) {
-                        sleep(0);
+                        // #. need to wait until the reference count reaches 0
+                        while (logManager.getLogPage(flushPageIndex).getRefCnt() != 0) {
+                            sleep(0);
+                        }
+
+                        beforeFlushOffset = logManager.getLogPage(flushPageIndex).getBufferLastFlushOffset();
+
+                        // put the content to disk (the thread still has a lock on the log page)
+                        logManager.getLogPage(flushPageIndex).flush();
+
+                        afterFlushOffset = logManager.getLogPage(flushPageIndex).getBufferLastFlushOffset();
+
+                        // increment the last flushed lsn
+                        logManager.incrementLastFlushedLsn(afterFlushOffset - beforeFlushOffset);
+
+                        // advance currentLsn to lastFlushedLsn + 1 if it has fallen behind.
+                        if (logManager.getLastFlushedLsn().get() + 1 > logManager.getCurrentLsn().get()) {
+                            logManager.getCurrentLsn().set(logManager.getLastFlushedLsn().get() + 1);
+                        }
+                        
+                        // Map the log page to a new region in the log file once the flush offset reaches the logPageSize
+                        if (afterFlushOffset == logPageSize) {
+                            long diskNextWriteOffset = logManager.getLogPages()[flushPageIndex].getDiskNextWriteOffset()
+                                    + logBufferSize;
+                            logManager.resetLogPage(logManager.getLastFlushedLsn().get() + 1 + logBufferSize,
+                                    diskNextWriteOffset, flushPageIndex);
+                            resetFlushPageIndex = true;
+                        }
+                        
+                        // decrement activeTxnCountOnIndexes
+                        logManager.decrementActiveTxnCountOnIndexes(flushPageIndex);
+
+                    } finally {
+                        logManager.getLogPage(flushPageIndex).releaseWriteLatch();
                     }
 
-                    // #. set the logPageOwnerCount to 0 (LOG_FLUSHER)
-                    // meaning it is flushing.
-                    logManager.getLogPageOwnershipCount(pageToFlush).set(PageOwnershipStatus.LOG_FLUSHER);
-
-                    // put the content to disk (the thread still has a lock on
-                    // the log page)
-                    logManager.getLogPage(pageToFlush).flush();
-
-                    // Map the log page to a new region in the log file.
-                    long nextWritePosition = logManager.getLogPages()[pageToFlush].getNextWritePosition()
-                            + logManager.getLogManagerProperties().getLogBufferSize();
-
-                    logManager.resetLogPage(logManager.getLastFlushedLsn().get() + 1
-                            + logManager.getLogManagerProperties().getLogBufferSize(), nextWritePosition, pageToFlush);
-
-                    // increment the last flushed lsn and lastFlushedPage
-                    logManager.incrementLastFlushedLsn(logManager.getLogManagerProperties().getLogPageSize());
-
-                    // decrement activeTxnCountOnIndexes
-                    logManager.decrementActiveTxnCountOnIndexes(pageToFlush);
-
-                    // reset the count to 1
-                    logManager.getLogPageOwnershipCount(pageToFlush).set(PageOwnershipStatus.LOG_WRITER);
-
-                    // mark the page as ACTIVE
-                    logManager.getLogPageStatus(pageToFlush).set(LogManager.PageState.ACTIVE);
-
                     // #. checks the queue whether there is another flush
                     // request on the same log buffer
                     // If there is another request, then simply remove it.
-                    if (flushRequestQueue[pageToFlush].peek() != null) {
-                        flushRequestQueue[pageToFlush].take();
+                    if (flushRequestQueue[flushPageIndex].peek() != null) {
+                        flushRequestQueue[flushPageIndex].take();
                     }
 
                     // notify all waiting (transaction) threads.
-                    logManager.getLogPage(pageToFlush).notifyAll();
+                    logManager.getLogPage(flushPageIndex).notifyAll();
+
+                    if (resetFlushPageIndex) {
+                        flushPageIndex = logManager.getNextPageInSequence(flushPageIndex);
+                        resetFlushPageIndex = false;
+                    }
                 }
             } catch (IOException ioe) {
                 ioe.printStackTrace();
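
The reworked flusher loop drives group commit off the buffer offsets: it skips the wait when the current page cannot take another commit record, latches the page, flushes, advances the last-flushed LSN by exactly the number of bytes written, and only rotates flushPageIndex after the page is completely on disk and has been remapped. Below is a minimal, self-contained sketch of that wait-flush-advance cycle; SketchLogPage, flushOnce, and all sizes are assumptions for illustration, not the AsterixDB classes.

    // A minimal sketch of one group-commit flush round; all names and sizes are illustrative.
    public class GroupCommitSketch {

        static class SketchLogPage {
            final int pageSize;
            int nextWriteOffset;       // next byte a log writer would append at
            int lastFlushOffset;       // bytes below this offset are already on disk

            SketchLogPage(int pageSize) { this.pageSize = pageSize; }

            boolean hasRoomForCommit(int commitLogSize) {
                return nextWriteOffset < pageSize - commitLogSize;
            }

            int flush() {              // returns the number of bytes written in this round
                int written = nextWriteOffset - lastFlushOffset;
                // a real page would force buffer[lastFlushOffset, nextWriteOffset) to the file here
                lastFlushOffset = nextWriteOffset;
                return written;
            }
        }

        static long flushOnce(SketchLogPage page, long lastFlushedLsn,
                              int commitLogSize, long groupCommitWaitMs) throws InterruptedException {
            // only wait for more commits to accumulate if the page can still take one
            if (page.hasRoomForCommit(commitLogSize)) {
                Thread.sleep(groupCommitWaitMs);
            }
            int written = page.flush();
            return lastFlushedLsn + written;   // the LSN advances by the bytes actually flushed
        }

        public static void main(String[] args) throws InterruptedException {
            SketchLogPage page = new SketchLogPage(128 * 1024);
            page.nextWriteOffset = 4096;                     // pretend some records were appended
            long lsn = flushOnce(page, 0, 25, 10);
            System.out.println("lastFlushedLsn after flush: " + lsn);   // 0 + 4096 = 4096
        }
    }
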
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManagerProperties.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManagerProperties.java
index 5040fa9..581ce4c 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManagerProperties.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManagerProperties.java
@@ -28,6 +28,7 @@
     public static final String NUM_LOG_PAGES_KEY = "num_log_pages";
     public static final String LOG_FILE_PREFIX_KEY = "log_file_prefix";
     public static final String GROUP_COMMIT_WAIT_PERIOD_KEY = "group_commit_wait_period";
+    public static final String DISK_SECTOR_SIZE_KEY = "disk_sector_size";
 
     private static final int DEFAULT_LOG_PAGE_SIZE = 128 * 1024; //128KB
     private static final int DEFAULT_NUM_LOG_PAGES = 8;
@@ -35,6 +36,7 @@
     private static final long DEFAULT_GROUP_COMMIT_WAIT_PERIOD = 200; // time in millisec.
     private static final String DEFAULT_LOG_FILE_PREFIX = "asterix_transaction_log";
     private static final String DEFAULT_LOG_DIRECTORY = "asterix_logs/";
+    private static final int DEFAULT_DISK_SECTOR_SIZE = 4096;
 
     // follow the naming convention <logFilePrefix>_<number> where number starts from 0
     private final String logFilePrefix;
@@ -51,6 +53,8 @@
     private final int logBufferSize;
     // maximum size of each log file
     private final long logPartitionSize;
+    // disk sector size in bytes
+    private final int diskSectorSize;
 
     public LogManagerProperties(Properties properties, String nodeId) {
         this.logDirKey = new String(nodeId + LOG_DIR_SUFFIX_KEY);
@@ -66,6 +70,8 @@
         this.logBufferSize = logPageSize * numLogPages;
         //make sure that the log partition size is the multiple of log buffer size.
         this.logPartitionSize = (logPartitionSize / logBufferSize) * logBufferSize;
+        this.diskSectorSize = Integer.parseInt(properties.getProperty(DISK_SECTOR_SIZE_KEY, ""
+                + DEFAULT_DISK_SECTOR_SIZE));
     }
 
     public long getLogPartitionSize() {
@@ -99,6 +105,10 @@
     public String getLogDirKey() {
         return logDirKey;
     }
+    
+    public int getDiskSectorSize() {
+        return diskSectorSize;
+    }
 
     public String toString() {
         StringBuilder builder = new StringBuilder();
@@ -108,6 +118,7 @@
         builder.append("num_log_pages : " + numLogPages + FileUtil.lineSeparator);
         builder.append("log_partition_size : " + logPartitionSize + FileUtil.lineSeparator);
         builder.append("group_commit_wait_period : " + groupCommitWaitPeriod + FileUtil.lineSeparator);
+        builder.append("disk_sector_size : " + diskSectorSize + FileUtil.lineSeparator);
         return builder.toString();
     }
 }
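
The new disk_sector_size property is parsed the same way as the existing numeric settings, falling back to 4096 bytes when the key is absent. A small sketch of the same parse-with-default idiom on a plain java.util.Properties object (the class name is illustrative):

    import java.util.Properties;

    public class DiskSectorSizeExample {
        private static final int DEFAULT_DISK_SECTOR_SIZE = 4096;

        public static void main(String[] args) {
            Properties props = new Properties();           // nothing set: the default applies
            int sectorSize = Integer.parseInt(
                    props.getProperty("disk_sector_size", String.valueOf(DEFAULT_DISK_SECTOR_SIZE)));
            System.out.println(sectorSize);                // prints 4096

            props.setProperty("disk_sector_size", "512");  // explicit override
            sectorSize = Integer.parseInt(
                    props.getProperty("disk_sector_size", String.valueOf(DEFAULT_DISK_SECTOR_SIZE)));
            System.out.println(sectorSize);                // prints 512
        }
    }
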
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogRecordHelper.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogRecordHelper.java
index 6b882ef..1b65d8f 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogRecordHelper.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogRecordHelper.java
@@ -50,6 +50,9 @@
 public class LogRecordHelper implements ILogRecordHelper {
 
     private final int LOG_CHECKSUM_SIZE = 8;
+    private final int LOG_HEADER_PART1_SIZE = 17;
+    private final int LOG_HEADER_PART2_SIZE = 21;
+    private final int COMMIT_LOG_SIZE = LOG_HEADER_PART1_SIZE + LOG_CHECKSUM_SIZE;
 
     private final int MAGIC_NO_POS = 0;
     private final int LOG_TYPE_POS = 4;
@@ -60,7 +63,9 @@
     private final int RESOURCE_ID_POS = 25;
     private final int RESOURCE_MGR_ID_POS = 33;
     private final int LOG_RECORD_SIZE_POS = 34;
+    
 
+    
     private ILogManager logManager;
 
     public LogRecordHelper(ILogManager logManager) {
@@ -118,7 +123,11 @@
 
     @Override
     public int getLogContentSize(LogicalLogLocator logicalLogLocater) {
-        return logicalLogLocater.getBuffer().readInt(logicalLogLocater.getMemoryOffset() + LOG_RECORD_SIZE_POS);
+        if (getLogType(logicalLogLocater) == LogType.COMMIT || getLogType(logicalLogLocater) == LogType.ENTITY_COMMIT) {
+            return 0;
+        } else {
+            return logicalLogLocater.getBuffer().readInt(logicalLogLocater.getMemoryOffset() + LOG_RECORD_SIZE_POS);
+        }
     }
 
     @Override
@@ -178,7 +187,7 @@
 
         /* magic no */
         (logicalLogLocator.getBuffer()).writeInt(logicalLogLocator.getMemoryOffset() + MAGIC_NO_POS,
-                logManager.getLogManagerProperties().LOG_MAGIC_NUMBER);
+                LogManagerProperties.LOG_MAGIC_NUMBER);
 
         /* log type */
         (logicalLogLocator.getBuffer()).put(logicalLogLocator.getMemoryOffset() + LOG_TYPE_POS, logType);
@@ -230,18 +239,18 @@
     @Override
     public int getLogRecordSize(byte logType, int logBodySize) {
         if (logType == LogType.UPDATE) {
-            return 46 + logBodySize;
+            return LOG_HEADER_PART1_SIZE + LOG_HEADER_PART2_SIZE + LOG_CHECKSUM_SIZE + logBodySize;
         } else {
-            return 25;
+            return COMMIT_LOG_SIZE;
         }
     }
 
     @Override
     public int getLogHeaderSize(byte logType) {
         if (logType == LogType.UPDATE) {
-            return 38;
+            return LOG_HEADER_PART1_SIZE + LOG_HEADER_PART2_SIZE;
         } else {
-            return 17;
+            return LOG_HEADER_PART1_SIZE;
         }
     }
 
@@ -249,4 +258,8 @@
     public int getLogChecksumSize() {
         return LOG_CHECKSUM_SIZE;
     }
+    
+    public int getCommitLogSize() {
+        return COMMIT_LOG_SIZE;
+    }
 }
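
The named constants reproduce the old literals exactly: an UPDATE record is LOG_HEADER_PART1_SIZE + LOG_HEADER_PART2_SIZE + LOG_CHECKSUM_SIZE = 17 + 21 + 8 = 46 bytes plus the body, a commit record is 17 + 8 = 25 bytes, and the header sizes are 38 and 17 bytes, so the on-disk log format does not change. A tiny check of that arithmetic (the body size below is illustrative):

    public class LogRecordSizeCheck {
        public static void main(String[] args) {
            final int part1 = 17, part2 = 21, checksum = 8, body = 100;  // body size is illustrative
            System.out.println("UPDATE record size : " + (part1 + part2 + checksum + body)); // 46 + body = 146
            System.out.println("UPDATE header size : " + (part1 + part2));                   // 38
            System.out.println("commit record size : " + (part1 + checksum));                // 25
            System.out.println("other header size  : " + part1);                             // 17
        }
    }
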
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogUtil.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogUtil.java
index feaca86..77a99bc 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogUtil.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogUtil.java
@@ -47,6 +47,9 @@
                 List<String> logFiles = getLogFiles(logManagerProperties);
                 if (logFiles == null || logFiles.size() == 0) {
                     FileUtil.createFileIfNotExists(getLogFilePath(logManagerProperties, 0));
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("created a log file: " + getLogFilePath(logManagerProperties, 0));
+                    }
                 } else {
                     File logFile = new File(LogUtil.getLogFilePath(logManagerProperties,
                             Long.parseLong(logFiles.get(logFiles.size() - 1))));
@@ -55,7 +58,13 @@
                 }
             } else {
                 FileUtil.createNewDirectory(logManagerProperties.getLogDir());
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("created the log directory: " + logManagerProperties.getLogDir());
+                }
                 FileUtil.createFileIfNotExists(getLogFilePath(logManagerProperties, 0));
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("created a log file: " + getLogFilePath(logManagerProperties, 0));
+                }
             }
         } catch (IOException ioe) {
             throw new ACIDException("Unable to initialize log anchor", ioe);
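
The new messages follow the usual java.util.logging idiom of guarding the call with isLoggable so the message string is only concatenated when INFO is actually enabled. A small sketch of the same pattern; the class name and path are illustrative:

    import java.util.logging.Level;
    import java.util.logging.Logger;

    public class GuardedLoggingExample {
        private static final Logger LOGGER = Logger.getLogger(GuardedLoggingExample.class.getName());

        static void reportCreated(String path) {
            // the guard skips building the message entirely when INFO is disabled
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("created a log file: " + path);
            }
        }

        public static void main(String[] args) {
            reportCreated("asterix_logs/asterix_transaction_log_0");  // hypothetical path
        }
    }
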
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/RecoveryManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/RecoveryManager.java
index c27ae20..47c5dcb 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/RecoveryManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/RecoveryManager.java
@@ -116,6 +116,9 @@
             //This is initial bootstrap. 
             //Otherwise, the checkpoint file is deleted unfortunately. What we can do in this case?
             state = SystemState.NEW_UNIVERSE;
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("The checkpoint file doesn't exist: systemState = NEW_UNIVERSE");
+            }
             return state;
         }
 
@@ -392,7 +395,7 @@
         }
 
         if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("[RecoveryMgr] recovery is over");
+            LOGGER.info("[RecoveryMgr] recovery is completed.");
         }
         if (IS_DEBUG_MODE) {
             System.out.println("[RecoveryMgr] Count: Update/Commit/Redo = " + updateLogCount + "/" + commitLogCount
@@ -422,6 +425,10 @@
     @Override
     public synchronized void checkpoint(boolean isSharpCheckpoint) throws ACIDException {
 
+        if (isSharpCheckpoint && LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Starting sharp checkpoint ... ");
+        }
+        
         LogManager logMgr = (LogManager) txnSubsystem.getLogManager();
         TransactionManager txnMgr = (TransactionManager) txnSubsystem.getTransactionManager();
         String logDir = logMgr.getLogManagerProperties().getLogDir();
@@ -513,6 +520,10 @@
         if (isSharpCheckpoint) {
             logMgr.renewLogFiles();
         }
+        
+        if (isSharpCheckpoint && LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Completed sharp checkpoint.");
+        }
     }
 
     private CheckpointObject readCheckpoint() throws ACIDException, FileNotFoundException {
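
The extra message makes the bootstrap path visible in the logs: if no checkpoint file exists at startup, the system is treated as a new universe and no recovery is attempted. A hedged sketch of that branch, with hypothetical names, paths, and a reduced state enum:

    import java.io.File;

    public class SystemStateSketch {
        enum SystemState { NEW_UNIVERSE, RECOVERING, HEALTHY }   // hypothetical subset of states

        static SystemState detect(String checkpointPath) {
            if (!new File(checkpointPath).exists()) {
                // no checkpoint file: initial bootstrap, nothing to recover
                return SystemState.NEW_UNIVERSE;
            }
            // a real implementation would read the checkpoint and compare LSNs here
            return SystemState.HEALTHY;
        }

        public static void main(String[] args) {
            System.out.println(detect("asterix_logs/checkpoint_0"));  // hypothetical path
        }
    }
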
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/DatasetIdFactory.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/DatasetIdFactory.java
index 65512ec..17eaf38 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/DatasetIdFactory.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/DatasetIdFactory.java
@@ -4,12 +4,22 @@
 
 public class DatasetIdFactory {
     private static AtomicInteger id = new AtomicInteger();
+    private static boolean isInitialized = false; 
+    
+    public static boolean isInitialized() {
+        return isInitialized;
+    }
     
     public static void initialize(int initialId) {
     	id.set(initialId);
+    	isInitialized = true;
     }
 
     public static int generateDatasetId() {
         return id.incrementAndGet();
     }
+    
+    public static int getMostRecentDatasetId() {
+        return id.get();
+    }
 }
\ No newline at end of file
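
The factory hands out dataset ids from a single AtomicInteger, so generateDatasetId() is safe to call from concurrent threads once initialize() has run, and getMostRecentDatasetId() exposes the current counter without consuming an id (useful when checkpointing). A compact usage sketch; the class name is illustrative, and the initialization flag is shown as an AtomicBoolean purely as an assumption for the sketch:

    import java.util.concurrent.atomic.AtomicBoolean;
    import java.util.concurrent.atomic.AtomicInteger;

    public class IdFactorySketch {
        private static final AtomicInteger id = new AtomicInteger();
        private static final AtomicBoolean initialized = new AtomicBoolean(false);

        static void initialize(int initialId) {
            id.set(initialId);
            initialized.set(true);
        }

        static boolean isInitialized() { return initialized.get(); }
        static int generateId()        { return id.incrementAndGet(); }
        static int mostRecentId()      { return id.get(); }

        public static void main(String[] args) {
            initialize(100);                       // e.g. restored from a checkpoint
            System.out.println(generateId());      // 101
            System.out.println(generateId());      // 102
            System.out.println(mostRecentId());    // 102, without consuming an id
            System.out.println(isInitialized());   // true
        }
    }
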
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionManagementConstants.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionManagementConstants.java
index 3d25e54..7d04f61 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionManagementConstants.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionManagementConstants.java
@@ -29,7 +29,6 @@
         public static final String LOG_CONF_DIR = "log_conf";
         public static final String LOG_CONF_FILE = "log.properties";
         public static final String ASTERIX_CONF_DIR = "src/main/resources";
-        public static final String ASTERIX_CONF_FILE = "test.properties";
         public static final String DEFAULT_LOG_DIR = "asterix_logs";
         public static final int TERMINAL_LSN = -1;
     }
diff --git a/pom.xml b/pom.xml
index 20efdb3..f4c46c6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -84,6 +84,7 @@
                 <module>asterix-server</module>
                 <module>asterix-installer</module>
                 <module>asterix-events</module>
+                <module>asterix-doc</module>
         </modules>
 
 	<repositories>